/*
 * Netburst Performance Events (P4, old Xeon)
 */

#ifndef PERF_EVENT_P4_H
#define PERF_EVENT_P4_H

#include <linux/cpu.h>
#include <linux/bitops.h>

/*
 * NetBurst shares its performance MSRs between the two
 * logical processors when HT is turned on (note: on Atom
 * with HT support, by contrast, the perf MSRs are not
 * shared and every thread has its own set)
 */
#define ARCH_P4_TOTAL_ESCR		(46)
#define ARCH_P4_RESERVED_ESCR		(2) /* IQ_ESCR(0,1) not always present */
#define ARCH_P4_MAX_ESCR		(ARCH_P4_TOTAL_ESCR - ARCH_P4_RESERVED_ESCR)
#define ARCH_P4_MAX_CCCR		(18)

#define ARCH_P4_CNTRVAL_BITS		(40)
#define ARCH_P4_CNTRVAL_MASK		((1ULL << ARCH_P4_CNTRVAL_BITS) - 1)
#define ARCH_P4_UNFLAGGED_BIT		((1ULL) << (ARCH_P4_CNTRVAL_BITS - 1))

#define P4_ESCR_EVENT_MASK		0x7e000000U
#define P4_ESCR_EVENT_SHIFT		25
#define P4_ESCR_EVENTMASK_MASK		0x01fffe00U
#define P4_ESCR_EVENTMASK_SHIFT		9
#define P4_ESCR_TAG_MASK		0x000001e0U
#define P4_ESCR_TAG_SHIFT		5
#define P4_ESCR_TAG_ENABLE		0x00000010U
#define P4_ESCR_T0_OS			0x00000008U
#define P4_ESCR_T0_USR			0x00000004U
#define P4_ESCR_T1_OS			0x00000002U
#define P4_ESCR_T1_USR			0x00000001U

#define P4_ESCR_EVENT(v)		((v) << P4_ESCR_EVENT_SHIFT)
#define P4_ESCR_EMASK(v)		((v) << P4_ESCR_EVENTMASK_SHIFT)
#define P4_ESCR_TAG(v)			((v) << P4_ESCR_TAG_SHIFT)

#define P4_CCCR_OVF			0x80000000U
#define P4_CCCR_CASCADE			0x40000000U
#define P4_CCCR_OVF_PMI_T0		0x04000000U
#define P4_CCCR_OVF_PMI_T1		0x08000000U
#define P4_CCCR_FORCE_OVF		0x02000000U
#define P4_CCCR_EDGE			0x01000000U
#define P4_CCCR_THRESHOLD_MASK		0x00f00000U
#define P4_CCCR_THRESHOLD_SHIFT		20
#define P4_CCCR_COMPLEMENT		0x00080000U
#define P4_CCCR_COMPARE			0x00040000U
#define P4_CCCR_ESCR_SELECT_MASK	0x0000e000U
#define P4_CCCR_ESCR_SELECT_SHIFT	13
#define P4_CCCR_ENABLE			0x00001000U
#define P4_CCCR_THREAD_SINGLE		0x00010000U
#define P4_CCCR_THREAD_BOTH		0x00020000U
#define P4_CCCR_THREAD_ANY		0x00030000U
#define P4_CCCR_RESERVED		0x00000fffU

#define P4_CCCR_THRESHOLD(v)		((v) << P4_CCCR_THRESHOLD_SHIFT)
#define P4_CCCR_ESEL(v)			((v) << P4_CCCR_ESCR_SELECT_SHIFT)

#define P4_GEN_ESCR_EMASK(class, name, bit) \
	class##__##name = ((1 << bit) << P4_ESCR_EVENTMASK_SHIFT)
#define P4_ESCR_EMASK_BIT(class, name)	class##__##name

/*
 * The config field is 64 bits wide and consists of
 * HT << 63 | ESCR << 32 | CCCR
 * where HT is the HyperThreading bit (since the ESCR
 * has this bit reserved we may use it for our own purpose)
 *
 * note that these are NOT the addresses of the respective
 * ESCR and CCCR registers but only a packed value which has
 * to be unpacked and written to the proper addresses
 *
 * the basic idea is to pack as much info as possible
 */
#define p4_config_pack_escr(v)		(((u64)(v)) << 32)
#define p4_config_pack_cccr(v)		(((u64)(v)) & 0xffffffffULL)
#define p4_config_unpack_escr(v)	(((u64)(v)) >> 32)
#define p4_config_unpack_cccr(v)	(((u64)(v)) & 0xffffffffULL)

#define p4_config_unpack_emask(v)			\
	({						\
		u32 t = p4_config_unpack_escr((v));	\
		t = t & P4_ESCR_EVENTMASK_MASK;		\
		t = t >> P4_ESCR_EVENTMASK_SHIFT;	\
		t;					\
	})

#define p4_config_unpack_event(v)			\
	({						\
		u32 t = p4_config_unpack_escr((v));	\
		t = t & P4_ESCR_EVENT_MASK;		\
		t = t >> P4_ESCR_EVENT_SHIFT;		\
		t;					\
	})

#define P4_CONFIG_HT_SHIFT		63
#define P4_CONFIG_HT			(1ULL << P4_CONFIG_HT_SHIFT)

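/*
 * A quick illustration of how the pack/unpack helpers above fit
 * together (a sketch only; the event code 0x3 and ESCR select 0x2
 * are arbitrary values chosen for the example, not real encodings):
 *
 *	u64 config = p4_config_pack_escr(P4_ESCR_EVENT(0x3)	|
 *					 P4_ESCR_EMASK(1 << 0)	|
 *					 P4_ESCR_T0_OS | P4_ESCR_T0_USR) |
 *		     p4_config_pack_cccr(P4_CCCR_ESEL(0x2) | P4_CCCR_ENABLE);
 *
 * then p4_config_unpack_event(config) returns 0x3,
 * p4_config_unpack_emask(config) returns 0x1 and
 * p4_config_unpack_cccr(config) returns the low 32 bits unchanged.
 */
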
/*
 * If an event has an alias it should be marked
 * with a special bit. (Don't forget to check
 * P4_PEBS_CONFIG_MASK and related bits when
 * modifying this.)
 */
#define P4_CONFIG_ALIASABLE		(1 << 9)

/*
 * The bits we allow to pass through for RAW events
 */
#define P4_CONFIG_MASK_ESCR		\
	P4_ESCR_EVENT_MASK	|	\
	P4_ESCR_EVENTMASK_MASK	|	\
	P4_ESCR_TAG_MASK	|	\
	P4_ESCR_TAG_ENABLE

#define P4_CONFIG_MASK_CCCR		\
	P4_CCCR_EDGE		|	\
	P4_CCCR_THRESHOLD_MASK	|	\
	P4_CCCR_COMPLEMENT	|	\
	P4_CCCR_COMPARE		|	\
	P4_CCCR_THREAD_ANY	|	\
	P4_CCCR_RESERVED

/* some dangerous bits are reserved for kernel internals */
#define P4_CONFIG_MASK					\
	(p4_config_pack_escr(P4_CONFIG_MASK_ESCR))	| \
	(p4_config_pack_cccr(P4_CONFIG_MASK_CCCR))

/*
 * In case of event aliasing we need to preserve some
 * caller bits otherwise the mapping won't be complete.
 */
#define P4_CONFIG_EVENT_ALIAS_MASK			\
	(p4_config_pack_escr(P4_CONFIG_MASK_ESCR)	| \
	 p4_config_pack_cccr(P4_CCCR_EDGE		| \
			     P4_CCCR_THRESHOLD_MASK	| \
			     P4_CCCR_COMPLEMENT		| \
			     P4_CCCR_COMPARE))

#define P4_CONFIG_EVENT_ALIAS_IMMUTABLE_BITS		\
	((P4_CONFIG_HT)					| \
	 p4_config_pack_escr(P4_ESCR_T0_OS		| \
			     P4_ESCR_T0_USR		| \
			     P4_ESCR_T1_OS		| \
			     P4_ESCR_T1_USR)		| \
	 p4_config_pack_cccr(P4_CCCR_OVF		| \
			     P4_CCCR_CASCADE		| \
			     P4_CCCR_FORCE_OVF		| \
			     P4_CCCR_THREAD_ANY		| \
			     P4_CCCR_OVF_PMI_T0		| \
			     P4_CCCR_OVF_PMI_T1		| \
			     P4_CONFIG_ALIASABLE))

static inline bool p4_is_event_cascaded(u64 config)
{
	u32 cccr = p4_config_unpack_cccr(config);
	return !!(cccr & P4_CCCR_CASCADE);
}

static inline int p4_ht_config_thread(u64 config)
{
	return !!(config & P4_CONFIG_HT);
}

static inline u64 p4_set_ht_bit(u64 config)
{
	return config | P4_CONFIG_HT;
}

static inline u64 p4_clear_ht_bit(u64 config)
{
	return config & ~P4_CONFIG_HT;
}

static inline int p4_ht_active(void)
{
#ifdef CONFIG_SMP
	return smp_num_siblings > 1;
#endif
	return 0;
}

static inline int p4_ht_thread(int cpu)
{
#ifdef CONFIG_SMP
	if (smp_num_siblings == 2)
		return cpu != cpumask_first(__get_cpu_var(cpu_sibling_map));
#endif
	return 0;
}

static inline int p4_should_swap_ts(u64 config, int cpu)
{
	return p4_ht_config_thread(config) ^ p4_ht_thread(cpu);
}

static inline u32 p4_default_cccr_conf(int cpu)
{
	/*
	 * Note that P4_CCCR_THREAD_ANY is "required" on
	 * non-HT machines (on HT machines we count TS events
	 * regardless of the state of the second logical processor)
	 */
	u32 cccr = P4_CCCR_THREAD_ANY;

	if (!p4_ht_thread(cpu))
		cccr |= P4_CCCR_OVF_PMI_T0;
	else
		cccr |= P4_CCCR_OVF_PMI_T1;

	return cccr;
}

static inline u32 p4_default_escr_conf(int cpu, int exclude_os, int exclude_usr)
{
	u32 escr = 0;

	if (!p4_ht_thread(cpu)) {
		if (!exclude_os)
			escr |= P4_ESCR_T0_OS;
		if (!exclude_usr)
			escr |= P4_ESCR_T0_USR;
	} else {
		if (!exclude_os)
			escr |= P4_ESCR_T1_OS;
		if (!exclude_usr)
			escr |= P4_ESCR_T1_USR;
	}

	return escr;
}

/*
 * These are the events which should be used in the "Event Select"
 * field of the ESCR register; they are like unique keys which allow
 * the kernel to determine which CCCR and COUNTER should be
 * used to track an event
 */
enum P4_EVENTS {
	P4_EVENT_TC_DELIVER_MODE,
	P4_EVENT_BPU_FETCH_REQUEST,
	P4_EVENT_ITLB_REFERENCE,
	P4_EVENT_MEMORY_CANCEL,
	P4_EVENT_MEMORY_COMPLETE,
	P4_EVENT_LOAD_PORT_REPLAY,
	P4_EVENT_STORE_PORT_REPLAY,
	P4_EVENT_MOB_LOAD_REPLAY,
	P4_EVENT_PAGE_WALK_TYPE,
	P4_EVENT_BSQ_CACHE_REFERENCE,
	P4_EVENT_IOQ_ALLOCATION,
	P4_EVENT_IOQ_ACTIVE_ENTRIES,
	P4_EVENT_FSB_DATA_ACTIVITY,
	P4_EVENT_BSQ_ALLOCATION,
	P4_EVENT_BSQ_ACTIVE_ENTRIES,
	P4_EVENT_SSE_INPUT_ASSIST,
	P4_EVENT_PACKED_SP_UOP,
	P4_EVENT_PACKED_DP_UOP,
	P4_EVENT_SCALAR_SP_UOP,
	P4_EVENT_SCALAR_DP_UOP,
	P4_EVENT_64BIT_MMX_UOP,
	P4_EVENT_128BIT_MMX_UOP,
	P4_EVENT_X87_FP_UOP,
	P4_EVENT_TC_MISC,
	P4_EVENT_GLOBAL_POWER_EVENTS,
	P4_EVENT_TC_MS_XFER,
	P4_EVENT_UOP_QUEUE_WRITES,
	P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE,
	P4_EVENT_RETIRED_BRANCH_TYPE,
	P4_EVENT_RESOURCE_STALL,
	P4_EVENT_WC_BUFFER,
	P4_EVENT_B2B_CYCLES,
	P4_EVENT_BNR,
	P4_EVENT_SNOOP,
	P4_EVENT_RESPONSE,
	P4_EVENT_FRONT_END_EVENT,
	P4_EVENT_EXECUTION_EVENT,
	P4_EVENT_REPLAY_EVENT,
	P4_EVENT_INSTR_RETIRED,
	P4_EVENT_UOPS_RETIRED,
	P4_EVENT_UOP_TYPE,
	P4_EVENT_BRANCH_RETIRED,
	P4_EVENT_MISPRED_BRANCH_RETIRED,
	P4_EVENT_X87_ASSIST,
	P4_EVENT_MACHINE_CLEAR,
	P4_EVENT_INSTR_COMPLETED,
};

#define P4_OPCODE(event)		event##_OPCODE
#define P4_OPCODE_ESEL(opcode)		((opcode & 0x00ff) >> 0)
#define P4_OPCODE_EVNT(opcode)		((opcode & 0xff00) >> 8)
#define P4_OPCODE_PACK(event, sel)	(((event) << 8) | sel)

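/*
 * An opcode simply packs the ESCR event code in its high byte and the
 * CCCR "ESCR select" value in its low byte. As a worked example (the
 * numbers are the ones used for BSQ_CACHE_REFERENCE below):
 * P4_OPCODE_PACK(0x0c, 0x07) yields 0x0c07, from which
 * P4_OPCODE_EVNT() recovers 0x0c and P4_OPCODE_ESEL() recovers 0x07.
 */
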
/*
 * The comments below each event describe the ESCR restriction
 * for that event and the counter indices available per ESCR
 *
 * MSR_P4_IQ_ESCR0 and MSR_P4_IQ_ESCR1 are available only on early
 * processor builds (family 0FH, models 01H-02H). These MSRs
 * are not available on later versions, so we do not use
 * them at all
 *
 * Also note that the P4_CCCR_ENABLE bit of CCCR1 does not work
 * properly, so that CCCR and its respective counter should not
 * be used as a result
 */
enum P4_EVENT_OPCODES {
	P4_OPCODE(P4_EVENT_TC_DELIVER_MODE)		= P4_OPCODE_PACK(0x01, 0x01),
	/*
	 * MSR_P4_TC_ESCR0:	4, 5
	 * MSR_P4_TC_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_BPU_FETCH_REQUEST)		= P4_OPCODE_PACK(0x03, 0x00),
	/*
	 * MSR_P4_BPU_ESCR0:	0, 1
	 * MSR_P4_BPU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_ITLB_REFERENCE)		= P4_OPCODE_PACK(0x18, 0x03),
	/*
	 * MSR_P4_ITLB_ESCR0:	0, 1
	 * MSR_P4_ITLB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_MEMORY_CANCEL)		= P4_OPCODE_PACK(0x02, 0x05),
	/*
	 * MSR_P4_DAC_ESCR0:	8, 9
	 * MSR_P4_DAC_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_MEMORY_COMPLETE)		= P4_OPCODE_PACK(0x08, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_LOAD_PORT_REPLAY)		= P4_OPCODE_PACK(0x04, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_STORE_PORT_REPLAY)		= P4_OPCODE_PACK(0x05, 0x02),
	/*
	 * MSR_P4_SAAT_ESCR0:	8, 9
	 * MSR_P4_SAAT_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_MOB_LOAD_REPLAY)		= P4_OPCODE_PACK(0x03, 0x02),
	/*
	 * MSR_P4_MOB_ESCR0:	0, 1
	 * MSR_P4_MOB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_PAGE_WALK_TYPE)		= P4_OPCODE_PACK(0x01, 0x04),
	/*
	 * MSR_P4_PMH_ESCR0:	0, 1
	 * MSR_P4_PMH_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BSQ_CACHE_REFERENCE)		= P4_OPCODE_PACK(0x0c, 0x07),
	/*
	 * MSR_P4_BSU_ESCR0:	0, 1
	 * MSR_P4_BSU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_IOQ_ALLOCATION)		= P4_OPCODE_PACK(0x03, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_IOQ_ACTIVE_ENTRIES)		= P4_OPCODE_PACK(0x1a, 0x06),
	/*
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_FSB_DATA_ACTIVITY)		= P4_OPCODE_PACK(0x17, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BSQ_ALLOCATION)		= P4_OPCODE_PACK(0x05, 0x07),
	/*
	 * MSR_P4_BSU_ESCR0:	0, 1
	 */

	P4_OPCODE(P4_EVENT_BSQ_ACTIVE_ENTRIES)		= P4_OPCODE_PACK(0x06, 0x07),
	/*
	 * NOTE: no ESCR name in docs, it's guessed
	 * MSR_P4_BSU_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_SSE_INPUT_ASSIST)		= P4_OPCODE_PACK(0x34, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_PACKED_SP_UOP)		= P4_OPCODE_PACK(0x08, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_PACKED_DP_UOP)		= P4_OPCODE_PACK(0x0c, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_SCALAR_SP_UOP)		= P4_OPCODE_PACK(0x0a, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_SCALAR_DP_UOP)		= P4_OPCODE_PACK(0x0e, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_64BIT_MMX_UOP)		= P4_OPCODE_PACK(0x02, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_128BIT_MMX_UOP)		= P4_OPCODE_PACK(0x1a, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_X87_FP_UOP)			= P4_OPCODE_PACK(0x04, 0x01),
	/*
	 * MSR_P4_FIRM_ESCR0:	8, 9
	 * MSR_P4_FIRM_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_TC_MISC)			= P4_OPCODE_PACK(0x06, 0x01),
	/*
	 * MSR_P4_TC_ESCR0:	4, 5
	 * MSR_P4_TC_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_GLOBAL_POWER_EVENTS)		= P4_OPCODE_PACK(0x13, 0x06),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_TC_MS_XFER)			= P4_OPCODE_PACK(0x05, 0x00),
	/*
	 * MSR_P4_MS_ESCR0:	4, 5
	 * MSR_P4_MS_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_UOP_QUEUE_WRITES)		= P4_OPCODE_PACK(0x09, 0x00),
	/*
	 * MSR_P4_MS_ESCR0:	4, 5
	 * MSR_P4_MS_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE)	= P4_OPCODE_PACK(0x05, 0x02),
	/*
	 * MSR_P4_TBPU_ESCR0:	4, 5
	 * MSR_P4_TBPU_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RETIRED_BRANCH_TYPE)		= P4_OPCODE_PACK(0x04, 0x02),
	/*
	 * MSR_P4_TBPU_ESCR0:	4, 5
	 * MSR_P4_TBPU_ESCR1:	6, 7
	 */

	P4_OPCODE(P4_EVENT_RESOURCE_STALL)		= P4_OPCODE_PACK(0x01, 0x01),
	/*
	 * MSR_P4_ALF_ESCR0:	12, 13, 16
	 * MSR_P4_ALF_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_WC_BUFFER)			= P4_OPCODE_PACK(0x05, 0x05),
	/*
	 * MSR_P4_DAC_ESCR0:	8, 9
	 * MSR_P4_DAC_ESCR1:	10, 11
	 */

	P4_OPCODE(P4_EVENT_B2B_CYCLES)			= P4_OPCODE_PACK(0x16, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_BNR)				= P4_OPCODE_PACK(0x08, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_SNOOP)			= P4_OPCODE_PACK(0x06, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_RESPONSE)			= P4_OPCODE_PACK(0x04, 0x03),
	/*
	 * MSR_P4_FSB_ESCR0:	0, 1
	 * MSR_P4_FSB_ESCR1:	2, 3
	 */

	P4_OPCODE(P4_EVENT_FRONT_END_EVENT)		= P4_OPCODE_PACK(0x08, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_EXECUTION_EVENT)		= P4_OPCODE_PACK(0x0c, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_REPLAY_EVENT)		= P4_OPCODE_PACK(0x09, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_INSTR_RETIRED)		= P4_OPCODE_PACK(0x02, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_UOPS_RETIRED)		= P4_OPCODE_PACK(0x01, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_UOP_TYPE)			= P4_OPCODE_PACK(0x02, 0x02),
	/*
	 * MSR_P4_RAT_ESCR0:	12, 13, 16
	 * MSR_P4_RAT_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_BRANCH_RETIRED)		= P4_OPCODE_PACK(0x06, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_MISPRED_BRANCH_RETIRED)	= P4_OPCODE_PACK(0x03, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_X87_ASSIST)			= P4_OPCODE_PACK(0x03, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_MACHINE_CLEAR)		= P4_OPCODE_PACK(0x02, 0x05),
	/*
	 * MSR_P4_CRU_ESCR2:	12, 13, 16
	 * MSR_P4_CRU_ESCR3:	14, 15, 17
	 */

	P4_OPCODE(P4_EVENT_INSTR_COMPLETED)		= P4_OPCODE_PACK(0x07, 0x04),
	/*
	 * MSR_P4_CRU_ESCR0:	12, 13, 16
	 * MSR_P4_CRU_ESCR1:	14, 15, 17
	 */
};

/*
 * a caller should use the P4_ESCR_EMASK_BIT helper to
 * pick the EventMask needed, for example
 *
 *	P4_ESCR_EMASK_BIT(P4_EVENT_TC_DELIVER_MODE, DD)
 */
enum P4_ESCR_EMASKS {
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DD, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DB, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, DI, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BD, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BB, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, BI, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_TC_DELIVER_MODE, ID, 6),

	P4_GEN_ESCR_EMASK(P4_EVENT_BPU_FETCH_REQUEST, TCMISS, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, HIT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, MISS, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_ITLB_REFERENCE, HIT_UK, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_CANCEL, ST_RB_FULL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_CANCEL, 64K_CONF, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_COMPLETE, LSC, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_MEMORY_COMPLETE, SSC, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_LOAD_PORT_REPLAY, SPLIT_LD, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_STORE_PORT_REPLAY, SPLIT_ST, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, NO_STA, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, NO_STD, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, PARTIAL_DATA, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_MOB_LOAD_REPLAY, UNALGN_ADDR, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_PAGE_WALK_TYPE, DTMISS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_PAGE_WALK_TYPE, ITMISS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITE, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_HITM, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITS, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITE, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_HITM, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_2ndL_MISS, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, RD_3rdL_MISS, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_CACHE_REFERENCE, WR_2ndL_MISS, 10),

	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, DEFAULT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, ALL_READ, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, ALL_WRITE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_UC, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WC, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WT, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WP, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, MEM_WB, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, OWN, 13),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, OTHER, 14),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ALLOCATION, PREFETCH, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, DEFAULT, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, ALL_READ, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, ALL_WRITE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_UC, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WC, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WT, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WP, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, MEM_WB, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, OWN, 13),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, OTHER, 14),
	P4_GEN_ESCR_EMASK(P4_EVENT_IOQ_ACTIVE_ENTRIES, PREFETCH, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_DRV, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_OWN, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DRDY_OTHER, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_DRV, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_OWN, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_FSB_DATA_ACTIVITY, DBSY_OTHER, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_TYPE0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_TYPE1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LEN0, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LEN1, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_IO_TYPE, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_LOCK_TYPE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_CACHE_TYPE, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_SPLIT_TYPE, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_DEM_TYPE, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, REQ_ORD_TYPE, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE0, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE1, 12),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ALLOCATION, MEM_TYPE2, 13),

	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_TYPE0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_TYPE1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LEN0, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LEN1, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_IO_TYPE, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_LOCK_TYPE, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_CACHE_TYPE, 7),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_SPLIT_TYPE, 8),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_DEM_TYPE, 9),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, REQ_ORD_TYPE, 10),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE0, 11),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE1, 12),
	P4_GEN_ESCR_EMASK(P4_EVENT_BSQ_ACTIVE_ENTRIES, MEM_TYPE2, 13),

	P4_GEN_ESCR_EMASK(P4_EVENT_SSE_INPUT_ASSIST, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_PACKED_SP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_PACKED_DP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_SCALAR_SP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_SCALAR_DP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_64BIT_MMX_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_128BIT_MMX_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_X87_FP_UOP, ALL, 15),

	P4_GEN_ESCR_EMASK(P4_EVENT_TC_MISC, FLUSH, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_GLOBAL_POWER_EVENTS, RUNNING, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_TC_MS_XFER, CISC, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_TC_BUILD, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_TC_DELIVER, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_QUEUE_WRITES, FROM_ROM, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, CONDITIONAL, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, CALL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, RETURN, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_MISPRED_BRANCH_TYPE, INDIRECT, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, CONDITIONAL, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, CALL, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, RETURN, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_RETIRED_BRANCH_TYPE, INDIRECT, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_RESOURCE_STALL, SBFULL, 5),

	P4_GEN_ESCR_EMASK(P4_EVENT_WC_BUFFER, WCB_EVICTS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_WC_BUFFER, WCB_FULL_EVICTS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_FRONT_END_EVENT, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_FRONT_END_EVENT, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS0, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS1, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS2, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, NBOGUS3, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS0, 4),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS1, 5),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS2, 6),
	P4_GEN_ESCR_EMASK(P4_EVENT_EXECUTION_EVENT, BOGUS3, 7),

	P4_GEN_ESCR_EMASK(P4_EVENT_REPLAY_EVENT, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_REPLAY_EVENT, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, NBOGUSNTAG, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, NBOGUSTAG, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, BOGUSNTAG, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_RETIRED, BOGUSTAG, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOPS_RETIRED, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOPS_RETIRED, BOGUS, 1),

	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_TYPE, TAGLOADS, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_UOP_TYPE, TAGSTORES, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMNP, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMNM, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMTP, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_BRANCH_RETIRED, MMTM, 3),

	P4_GEN_ESCR_EMASK(P4_EVENT_MISPRED_BRANCH_RETIRED, NBOGUS, 0),

	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, FPSU, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, FPSO, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, POAO, 2),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, POAU, 3),
	P4_GEN_ESCR_EMASK(P4_EVENT_X87_ASSIST, PREA, 4),

	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, CLEAR, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, MOCLEAR, 1),
	P4_GEN_ESCR_EMASK(P4_EVENT_MACHINE_CLEAR, SMCLEAR, 2),

	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_COMPLETED, NBOGUS, 0),
	P4_GEN_ESCR_EMASK(P4_EVENT_INSTR_COMPLETED, BOGUS, 1),
};

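/*
 * A minimal sketch (illustration only, not the p4 PMU driver's real
 * setup path) of how a 64-bit config can be composed from the pieces
 * above: an event from enum P4_EVENTS in the ESCR event field (as the
 * config layout notes at the end of this file describe), one of its
 * event-mask bits, the T0 OS/USR qualifiers, and the CCCR "ESCR select"
 * taken from the event's opcode. The choice of REPLAY_EVENT and the
 * helper name are arbitrary, for demonstration only.
 */
static inline u64 p4_example_replay_config(void)
{
	unsigned int esel = P4_OPCODE_ESEL(P4_OPCODE(P4_EVENT_REPLAY_EVENT));

	return p4_config_pack_escr(P4_ESCR_EVENT(P4_EVENT_REPLAY_EVENT)	|
				   P4_ESCR_EMASK_BIT(P4_EVENT_REPLAY_EVENT, NBOGUS) |
				   P4_ESCR_T0_OS | P4_ESCR_T0_USR)		|
	       p4_config_pack_cccr(P4_CCCR_ESEL(esel) | P4_CCCR_ENABLE);
}
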
/*
 * Note we have UOP and PEBS bits reserved for now,
 * just in case we need them later
 */
#define P4_PEBS_CONFIG_ENABLE		(1 << 7)
#define P4_PEBS_CONFIG_UOP_TAG		(1 << 8)
#define P4_PEBS_CONFIG_METRIC_MASK	0x3f
#define P4_PEBS_CONFIG_MASK		0xff

/*
 * note: only the counters MSR_IQ_COUNTER4 (16) and
 * MSR_IQ_COUNTER5 (17) are allowed for PEBS sampling
 */
#define P4_PEBS_ENABLE			0x02000000U
#define P4_PEBS_ENABLE_UOP_TAG		0x01000000U

#define p4_config_unpack_metric(v)	(((u64)(v)) & P4_PEBS_CONFIG_METRIC_MASK)
#define p4_config_unpack_pebs(v)	(((u64)(v)) & P4_PEBS_CONFIG_MASK)

#define p4_config_pebs_has(v, mask)	(p4_config_unpack_pebs(v) & (mask))

enum P4_PEBS_METRIC {
	P4_PEBS_METRIC__none,

	P4_PEBS_METRIC__1stl_cache_load_miss_retired,
	P4_PEBS_METRIC__2ndl_cache_load_miss_retired,
	P4_PEBS_METRIC__dtlb_load_miss_retired,
	P4_PEBS_METRIC__dtlb_store_miss_retired,
	P4_PEBS_METRIC__dtlb_all_miss_retired,
	P4_PEBS_METRIC__tagged_mispred_branch,
	P4_PEBS_METRIC__mob_load_replay_retired,
	P4_PEBS_METRIC__split_load_retired,
	P4_PEBS_METRIC__split_store_retired,

	P4_PEBS_METRIC__max
};

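/*
 * A minimal sketch (illustration only) of how a PEBS metric can be
 * attached to an existing config: the metric id sits in the low bits
 * of the CCCR half (bits that are reserved in the real CCCR register),
 * so it is simply packed in and can be read back with the helpers
 * above. The dtlb load miss metric and the helper name are arbitrary
 * choices made for the example.
 */
static inline u64 p4_example_attach_pebs_metric(u64 config)
{
	config |= p4_config_pack_cccr(P4_PEBS_METRIC__dtlb_load_miss_retired);

	/*
	 * p4_config_unpack_metric(config) now yields the metric id, and
	 * p4_config_pebs_has(config, P4_PEBS_CONFIG_METRIC_MASK) is non-zero
	 */
	return config;
}
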
/*
 * Notes on the internal configuration of ESCR+CCCR tuples
 *
 * Since the P4 layout of performance registers is quite different
 * from the "architectural" one, and we have only 64 bits to keep
 * the configuration of a performance event, the following trick
 * is used.
 *
 * 1) Since only the low 32 bits of both the ESCR and CCCR registers
 *    are meaningful, we pack them into a single 64 bit configuration.
 *    The low 32 bits of such a config correspond to the low 32 bits
 *    of the CCCR register and the high 32 bits correspond to the low
 *    32 bits of the ESCR register.
 *
 * 2) The meaning of every bit of such a config field can be found
 *    in the Intel SDM, but it should be noted that we "borrow" some
 *    reserved bits for our own usage and clean them or set them to
 *    proper values when we do a real write to the hardware registers.
 *
 * 3) The format of the config bits is the following, and each field
 *    should be either 0 or set to one of the predefined values:
 *
 * Low 32 bits
 * -----------
 * 0-6: P4_PEBS_METRIC enum
 * 7-11: reserved
 * 12: reserved (Enable)
 * 13-15: reserved (ESCR select)
 * 16-17: Active Thread
 * 18: Compare
 * 19: Complement
 * 20-23: Threshold
 * 24: Edge
 * 25: reserved (FORCE_OVF)
 * 26: reserved (OVF_PMI_T0)
 * 27: reserved (OVF_PMI_T1)
 * 28-29: reserved
 * 30: reserved (Cascade)
 * 31: reserved (OVF)
 *
 * High 32 bits
 * ------------
 * 0: reserved (T1_USR)
 * 1: reserved (T1_OS)
 * 2: reserved (T0_USR)
 * 3: reserved (T0_OS)
 * 4: Tag Enable
 * 5-8: Tag Value
 * 9-24: Event Mask (may use P4_ESCR_EMASK_BIT helper)
 * 25-30: enum P4_EVENTS
 * 31: reserved (HT thread)
 */

#endif /* PERF_EVENT_P4_H */