/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 *  a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The cycle
 *  counter and all 4 performance counters together can be reset separately.
 */

#ifdef CONFIG_CPU_V7
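/*
 * These counters back the generic perf events on ARMv7 cores, so e.g.
 * "perf stat -e cycles,instructions,branches,branch-misses" is serviced
 * by the per-core mappings below (illustrative usage; which of these
 * events a given implementation can actually count is implementation
 * defined).
 */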
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR		= 0x00,
	ARMV7_PERFCTR_IFETCH_MISS		= 0x01,
	ARMV7_PERFCTR_ITLB_MISS			= 0x02,
	ARMV7_PERFCTR_DCACHE_REFILL		= 0x03,	/* L1 */
	ARMV7_PERFCTR_DCACHE_ACCESS		= 0x04,	/* L1 */
	ARMV7_PERFCTR_DTLB_REFILL		= 0x05,
	ARMV7_PERFCTR_DREAD			= 0x06,
	ARMV7_PERFCTR_DWRITE			= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED		= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN			= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED		= 0x0A,
	ARMV7_PERFCTR_CID_WRITE			= 0x0B,
	/*
	 * ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE			= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH		= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN		= 0x0E,
	ARMV7_PERFCTR_UNALIGNED_ACCESS		= 0x0F,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED	= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES		= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED		= 0x12,
	ARMV7_PERFCTR_MEM_ACCESS		= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS		= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB		= 0x15,
	ARMV7_PERFCTR_L2_DCACHE_ACCESS		= 0x16,
	ARMV7_PERFCTR_L2_DCACHE_REFILL		= 0x17,
	ARMV7_PERFCTR_L2_DCACHE_WB		= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS		= 0x19,
	ARMV7_PERFCTR_MEMORY_ERROR		= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC		= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE		= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES		= 0x1D,

	ARMV7_PERFCTR_CPU_CYCLES		= 0xFF
};
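/*
 * These event numbers are what armv7_pmnc_write_evtsel() below programs
 * into the per-counter event select register; only the low byte is
 * writable there (ARMV7_EVTSEL_MASK).
 */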
/* ARMv7 Cortex-A8 specific event types */
enum armv7_a8_perf_types {
	ARMV7_PERFCTR_WRITE_BUFFER_FULL		= 0x40,
	ARMV7_PERFCTR_L2_STORE_MERGED		= 0x41,
	ARMV7_PERFCTR_L2_STORE_BUFF		= 0x42,
	ARMV7_PERFCTR_L2_ACCESS			= 0x43,
	ARMV7_PERFCTR_L2_CACH_MISS		= 0x44,
	ARMV7_PERFCTR_AXI_READ_CYCLES		= 0x45,
	ARMV7_PERFCTR_AXI_WRITE_CYCLES		= 0x46,
	ARMV7_PERFCTR_MEMORY_REPLAY		= 0x47,
	ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY	= 0x48,
	ARMV7_PERFCTR_L1_DATA_MISS		= 0x49,
	ARMV7_PERFCTR_L1_INST_MISS		= 0x4A,
	ARMV7_PERFCTR_L1_DATA_COLORING		= 0x4B,
	ARMV7_PERFCTR_L1_NEON_DATA		= 0x4C,
	ARMV7_PERFCTR_L1_NEON_CACH_DATA		= 0x4D,
	ARMV7_PERFCTR_L2_NEON			= 0x4E,
	ARMV7_PERFCTR_L2_NEON_HIT		= 0x4F,
	ARMV7_PERFCTR_L1_INST			= 0x50,
	ARMV7_PERFCTR_PC_RETURN_MIS_PRED	= 0x51,
	ARMV7_PERFCTR_PC_BRANCH_FAILED		= 0x52,
	ARMV7_PERFCTR_PC_BRANCH_TAKEN		= 0x53,
	ARMV7_PERFCTR_PC_BRANCH_EXECUTED	= 0x54,
	ARMV7_PERFCTR_OP_EXECUTED		= 0x55,
	ARMV7_PERFCTR_CYCLES_INST_STALL		= 0x56,
	ARMV7_PERFCTR_CYCLES_INST		= 0x57,
	ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL	= 0x58,
	ARMV7_PERFCTR_CYCLES_NEON_INST_STALL	= 0x59,
	ARMV7_PERFCTR_NEON_CYCLES		= 0x5A,

	ARMV7_PERFCTR_PMU0_EVENTS		= 0x70,
	ARMV7_PERFCTR_PMU1_EVENTS		= 0x71,
	ARMV7_PERFCTR_PMU_EVENTS		= 0x72,
};
/* ARMv7 Cortex-A9 specific event types */
enum armv7_a9_perf_types {
	ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC	= 0x40,
	ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC	= 0x41,
	ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC	= 0x42,

	ARMV7_PERFCTR_COHERENT_LINE_MISS	= 0x50,
	ARMV7_PERFCTR_COHERENT_LINE_HIT		= 0x51,

	ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES	= 0x60,
	ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES	= 0x61,
	ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES	= 0x62,
	ARMV7_PERFCTR_STREX_EXECUTED_PASSED	= 0x63,
	ARMV7_PERFCTR_STREX_EXECUTED_FAILED	= 0x64,
	ARMV7_PERFCTR_DATA_EVICTION		= 0x65,
	ARMV7_PERFCTR_ISSUE_STAGE_NO_INST	= 0x66,
	ARMV7_PERFCTR_ISSUE_STAGE_EMPTY		= 0x67,
	ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE	= 0x68,

	ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS	= 0x6E,

	ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST	= 0x70,
	ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST	= 0x71,
	ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST	= 0x72,
	ARMV7_PERFCTR_FP_EXECUTED_INST		= 0x73,
	ARMV7_PERFCTR_NEON_EXECUTED_INST	= 0x74,

	ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES	= 0x80,
	ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES	= 0x81,
	ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES	= 0x82,
	ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES	= 0x83,
	ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES	= 0x84,
	ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES	= 0x85,
	ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES	= 0x86,

	ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES	= 0x8A,
	ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES	= 0x8B,

	ARMV7_PERFCTR_ISB_INST			= 0x90,
	ARMV7_PERFCTR_DSB_INST			= 0x91,
	ARMV7_PERFCTR_DMB_INST			= 0x92,
	ARMV7_PERFCTR_EXT_INTERRUPTS		= 0x93,

	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED	= 0xA0,
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED	= 0xA1,
	ARMV7_PERFCTR_PLE_FIFO_FLUSH		= 0xA2,
	ARMV7_PERFCTR_PLE_RQST_COMPLETED	= 0xA3,
	ARMV7_PERFCTR_PLE_FIFO_OVERFLOW		= 0xA4,
	ARMV7_PERFCTR_PLE_RQST_PROG		= 0xA5
};
/* ARMv7 Cortex-A5 specific event types */
enum armv7_a5_perf_types {
	ARMV7_PERFCTR_IRQ_TAKEN			= 0x86,
	ARMV7_PERFCTR_FIQ_TAKEN			= 0x87,

	ARMV7_PERFCTR_EXT_MEM_RQST		= 0xc0,
	ARMV7_PERFCTR_NC_EXT_MEM_RQST		= 0xc1,
	ARMV7_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP	= 0xc3,
	ARMV7_PERFCTR_ENTER_READ_ALLOC		= 0xc4,
	ARMV7_PERFCTR_READ_ALLOC		= 0xc5,

	ARMV7_PERFCTR_STALL_SB_FULL		= 0xc9,
};
/* ARMv7 Cortex-A15 specific event types */
enum armv7_a15_perf_types {
	ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS	= 0x40,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS	= 0x41,
	ARMV7_PERFCTR_L1_DCACHE_READ_REFILL	= 0x42,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL	= 0x43,

	ARMV7_PERFCTR_L1_DTLB_READ_REFILL	= 0x4C,
	ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL	= 0x4D,

	ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS	= 0x50,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS	= 0x51,
	ARMV7_PERFCTR_L2_DCACHE_READ_REFILL	= 0x52,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL	= 0x53,

	ARMV7_PERFCTR_SPEC_PC_WRITE		= 0x76,
};
/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have Harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};
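/*
 * The generic perf layer indexes the *_perf_map tables above by
 * PERF_COUNT_HW_* to pick a hardware event number, and the
 * *_perf_cache_map tables below by (cache level, operation, result).
 * C(_x) is assumed to be the shared ARM perf shorthand for
 * PERF_COUNT_HW_CACHE_##_x, defined in the common ARM perf event code.
 */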
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
/*
 * Cortex-A9 HW events mapping
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		=
					ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_COHERENT_LINE_HIT,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_COHERENT_LINE_MISS,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
/*
 * Cortex-A5 HW events mapping
 */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= HW_OP_UNSUPPORTED,
};
static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		/*
		 * The prefetch counters don't differentiate between the I
		 * side and the D side.
		 */
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
/*
 * Cortex-A15 HW events mapping
 */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_SPEC_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};
static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/*
		 * Not all performance counters differentiate between read
		 * and write accesses/misses so we're not always strictly
		 * correct, but it's the best we can do. Writes and reads get
		 * combined in these cases.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
/*
 * Perf Events counters
 */
enum armv7_counters {
	ARMV7_CYCLE_COUNTER	= 1,	/* Cycle counter */
	ARMV7_COUNTER0		= 2,	/* First event counter */
};

/*
 * The cycle counter is ARMV7_CYCLE_COUNTER.
 * The first event counter is ARMV7_COUNTER0.
 * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
 */
#define	ARMV7_COUNTER_LAST	(ARMV7_COUNTER0 + armpmu->num_events - 1)
/*
 * ARMv7 low level PMNC access
 */

/*
 * Per-CPU PMNC: config reg
 */
#define ARMV7_PMNC_E		(1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P		(1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C		(1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D		(1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X		(1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP		(1 << 5) /* Disable CCNT if non-invasive debug */
#define	ARMV7_PMNC_N_SHIFT	11	 /* Number of counters supported */
#define	ARMV7_PMNC_N_MASK	0x1f
#define	ARMV7_PMNC_MASK		0x3f	 /* Mask for writable bits */
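/*
 * Worked example (illustrative value): a PMNC read of 0x4001 has bit 0
 * (ARMV7_PMNC_E) set, so the counters are enabled, and
 * (0x4001 >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK == 8, i.e. eight
 * event counters in addition to the cycle counter.
 */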
/*
 * Available counters
 */
#define ARMV7_CNT0		0	/* First event counter */
#define ARMV7_CCNT		31	/* Cycle counter */

/* Perf Event to low level counters mapping */
#define ARMV7_EVENT_CNT_TO_CNTx	(ARMV7_COUNTER0 - ARMV7_CNT0)
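/*
 * Worked example: with ARMV7_COUNTER0 == 2 and ARMV7_CNT0 == 0 the offset
 * is 2, so perf counter index 2 drives hardware counter CNT0, index 3
 * drives CNT1, and so on. The cycle counter (perf index 1) is handled
 * separately via the dedicated CCNT bit encodings below.
 */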
/*
 * CNTENS: counters enable reg
 */
#define ARMV7_CNTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENS_C		(1 << ARMV7_CCNT)

/*
 * CNTENC: counters disable reg
 */
#define ARMV7_CNTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENC_C		(1 << ARMV7_CCNT)

/*
 * INTENS: counters overflow interrupt enable reg
 */
#define ARMV7_INTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENS_C		(1 << ARMV7_CCNT)

/*
 * INTENC: counters overflow interrupt disable reg
 */
#define ARMV7_INTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENC_C		(1 << ARMV7_CCNT)

/*
 * EVTSEL: Event selection reg
 */
#define	ARMV7_EVTSEL_MASK	0xff	/* Mask for writable bits */

/*
 * SELECT: Counter selection reg
 */
#define	ARMV7_SELECT_MASK	0x1f	/* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg
 */
#define ARMV7_FLAG_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_FLAG_C		(1 << ARMV7_CCNT)
#define	ARMV7_FLAG_MASK		0xffffffff	/* Mask for writable bits */
#define	ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK
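/*
 * Worked example (illustrative value): a FLAG read of 0x80000004 means
 * both the cycle counter (ARMV7_FLAG_C, bit 31) and hardware counter
 * CNT2 (perf index 4, bit 2) have overflowed.
 */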
static inline unsigned long armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}

static inline void armv7_pmnc_write(unsigned long val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
static inline int armv7_pmnc_has_overflowed(unsigned long pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}

static inline int armv7_pmnc_counter_has_overflowed(unsigned long pmnc,
					enum armv7_counters counter)
{
	int ret = 0;

	if (counter == ARMV7_CYCLE_COUNTER)
		ret = pmnc & ARMV7_FLAG_C;
	else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
		ret = pmnc & ARMV7_FLAG_P(counter);
	else
		pr_err("CPU%u checking wrong counter %d overflow status\n",
			smp_processor_id(), counter);

	return ret;
}
static inline int armv7_pmnc_select_counter(unsigned int idx)
{
	u32 val;

	if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
		pr_err("CPU%u selecting wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
	isb();

	return idx;
}
static inline u32 armv7pmu_read_counter(int idx)
{
	unsigned long value = 0;

	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mrc p15, 0, %0, c9, c13, 2"
				     : "=r" (value));
	} else
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);

	return value;
}
static inline void armv7pmu_write_counter(int idx, u32 value)
{
	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mcr p15, 0, %0, c9, c13, 2"
				     : : "r" (value));
	} else
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
}
static inline void armv7_pmnc_write_evtsel(unsigned int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTSEL_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}
static inline u32 armv7_pmnc_enable_counter(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u enabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_CNTENS_C;
	else
		val = ARMV7_CNTENS_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));

	return idx;
}
static inline u32 armv7_pmnc_disable_counter(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u disabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_CNTENC_C;
	else
		val = ARMV7_CNTENC_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));

	return idx;
}
static inline u32 armv7_pmnc_enable_intens(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u enabling wrong PMNC counter interrupt enable %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_INTENS_C;
	else
		val = ARMV7_INTENS_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));

	return idx;
}
static inline u32 armv7_pmnc_disable_intens(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u disabling wrong PMNC counter interrupt enable %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_INTENC_C;
	else
		val = ARMV7_INTENC_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));

	return idx;
}
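/*
 * The overflow flag status register is write-one-to-clear: writing back
 * the value just read acknowledges exactly the overflows that were
 * observed and no others, which is what armv7_pmnc_getreset_flags()
 * below relies on.
 */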
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
#ifdef DEBUG
static void armv7_pmnc_dump_regs(void)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
	}
}
#endif
static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't need to set the event if it's a cycle count
	 */
	if (idx != ARMV7_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
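/*
 * The interrupt handler below returns IRQ_NONE when the PMU raised no
 * overflow, which lets the core IRQ layer cope with shared or spurious
 * PMU interrupt lines.
 */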
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct cpu_hw_events *cpuc;
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	perf_sample_data_init(&data, 0);

	cpuc = &__get_cpu_var(cpu_hw_events);
	for (idx = 0; idx <= armpmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		if (!test_bit(idx, cpuc->active_mask))
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event, hwc, idx, 1);
		data.period = event->hw.last_period;
		if (!armpmu_event_set_period(event, hwc, idx))
			continue;

		if (perf_event_overflow(event, &data, regs))
			armpmu->disable(hwc, idx);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
static void armv7pmu_start(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
static void armv7pmu_stop(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
				  struct hw_perf_event *event)
{
	int idx;

	/* Always place a cycle counter into the cycle counter. */
	if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV7_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
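/*
 * Illustrative allocation with the function above: a "cycles" event
 * always gets index ARMV7_CYCLE_COUNTER (1); any other event, e.g.
 * ARMV7_PERFCTR_INSTR_EXECUTED, takes the lowest free index from
 * ARMV7_COUNTER0 upwards, or -EAGAIN once every counter is busy.
 */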
static void armv7pmu_reset(void *info)
{
	u32 idx, nb_cnt = armpmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = 1; idx < nb_cnt; ++idx)
		armv7pmu_disable_event(NULL, idx);

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
static struct arm_pmu armv7pmu = {
	.handle_irq		= armv7pmu_handle_irq,
	.enable			= armv7pmu_enable_event,
	.disable		= armv7pmu_disable_event,
	.read_counter		= armv7pmu_read_counter,
	.write_counter		= armv7pmu_write_counter,
	.get_event_idx		= armv7pmu_get_event_idx,
	.start			= armv7pmu_start,
	.stop			= armv7pmu_stop,
	.reset			= armv7pmu_reset,
	.raw_event_mask		= 0xFF,
	.max_period		= (1LLU << 32) - 1,
};
static u32 __init armv7_read_num_pmnc_events(void)
{
	u32 nb_cnt;

	/* Read the nb of CNTx counters supported from PMNC */
	nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter and return */
	return nb_cnt + 1;
}
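/*
 * For example, on a Cortex-A8 (four event counters, per the header
 * comment above) PMNC.N reads as 4 and this function returns 5.
 */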
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA8;
	armv7pmu.name		= "ARMv7 Cortex-A8";
	armv7pmu.cache_map	= &armv7_a8_perf_cache_map;
	armv7pmu.event_map	= &armv7_a8_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA9;
	armv7pmu.name		= "ARMv7 Cortex-A9";
	armv7pmu.cache_map	= &armv7_a9_perf_cache_map;
	armv7pmu.event_map	= &armv7_a9_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA5;
	armv7pmu.name		= "ARMv7 Cortex-A5";
	armv7pmu.cache_map	= &armv7_a5_perf_cache_map;
	armv7pmu.event_map	= &armv7_a5_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA15;
	armv7pmu.name		= "ARMv7 Cortex-A15";
	armv7pmu.cache_map	= &armv7_a15_perf_cache_map;
	armv7pmu.event_map	= &armv7_a15_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
#else
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	return NULL;
}
#endif	/* CONFIG_CPU_V7 */