/*
 * ARM: perf: index ARMv7 event counters starting from zero
 * (pandora-kernel.git: arch/arm/kernel/perf_event_v7.c)
 */
/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 *  a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The cycle
 *  counter and all 4 performance counters together can be reset separately.
 */
18
#ifdef CONFIG_CPU_V7
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR		= 0x00,
	ARMV7_PERFCTR_IFETCH_MISS		= 0x01,
	ARMV7_PERFCTR_ITLB_MISS			= 0x02,
	ARMV7_PERFCTR_DCACHE_REFILL		= 0x03,	/* L1 */
	ARMV7_PERFCTR_DCACHE_ACCESS		= 0x04,	/* L1 */
	ARMV7_PERFCTR_DTLB_REFILL		= 0x05,
	ARMV7_PERFCTR_DREAD			= 0x06,
	ARMV7_PERFCTR_DWRITE			= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED		= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN			= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED		= 0x0A,
	ARMV7_PERFCTR_CID_WRITE			= 0x0B,
	/* ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE			= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH		= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN		= 0x0E,
	ARMV7_PERFCTR_UNALIGNED_ACCESS		= 0x0F,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED	= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES		= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED		= 0x12,
	ARMV7_PERFCTR_MEM_ACCESS		= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS		= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB		= 0x15,
	ARMV7_PERFCTR_L2_DCACHE_ACCESS		= 0x16,
	ARMV7_PERFCTR_L2_DCACHE_REFILL		= 0x17,
	ARMV7_PERFCTR_L2_DCACHE_WB		= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS		= 0x19,
	ARMV7_PERFCTR_MEMORY_ERROR		= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC		= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE		= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES		= 0x1D,

	/*
	 * Pseudo encoding used by this driver to request the dedicated
	 * cycle counter (see ARMV7_IDX_CYCLE_COUNTER below); it is not one
	 * of the architected 0x00-0x1D event numbers.
	 */
	ARMV7_PERFCTR_CPU_CYCLES		= 0xFF
};
69
/*
 * ARMv7 Cortex-A8 specific event types
 * (implementation-defined encodings; see the Cortex-A8 TRM)
 */
enum armv7_a8_perf_types {
	ARMV7_PERFCTR_WRITE_BUFFER_FULL		= 0x40,
	ARMV7_PERFCTR_L2_STORE_MERGED		= 0x41,
	ARMV7_PERFCTR_L2_STORE_BUFF		= 0x42,
	ARMV7_PERFCTR_L2_ACCESS			= 0x43,
	ARMV7_PERFCTR_L2_CACH_MISS		= 0x44,
	ARMV7_PERFCTR_AXI_READ_CYCLES		= 0x45,
	ARMV7_PERFCTR_AXI_WRITE_CYCLES		= 0x46,
	ARMV7_PERFCTR_MEMORY_REPLAY		= 0x47,
	ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY	= 0x48,
	ARMV7_PERFCTR_L1_DATA_MISS		= 0x49,
	ARMV7_PERFCTR_L1_INST_MISS		= 0x4A,
	ARMV7_PERFCTR_L1_DATA_COLORING		= 0x4B,
	ARMV7_PERFCTR_L1_NEON_DATA		= 0x4C,
	ARMV7_PERFCTR_L1_NEON_CACH_DATA		= 0x4D,
	ARMV7_PERFCTR_L2_NEON			= 0x4E,
	ARMV7_PERFCTR_L2_NEON_HIT		= 0x4F,
	ARMV7_PERFCTR_L1_INST			= 0x50,
	ARMV7_PERFCTR_PC_RETURN_MIS_PRED	= 0x51,
	ARMV7_PERFCTR_PC_BRANCH_FAILED		= 0x52,
	ARMV7_PERFCTR_PC_BRANCH_TAKEN		= 0x53,
	ARMV7_PERFCTR_PC_BRANCH_EXECUTED	= 0x54,
	ARMV7_PERFCTR_OP_EXECUTED		= 0x55,
	ARMV7_PERFCTR_CYCLES_INST_STALL		= 0x56,
	ARMV7_PERFCTR_CYCLES_INST		= 0x57,
	ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL	= 0x58,
	ARMV7_PERFCTR_CYCLES_NEON_INST_STALL	= 0x59,
	ARMV7_PERFCTR_NEON_CYCLES		= 0x5A,

	ARMV7_PERFCTR_PMU0_EVENTS		= 0x70,
	ARMV7_PERFCTR_PMU1_EVENTS		= 0x71,
	ARMV7_PERFCTR_PMU_EVENTS		= 0x72,
};
104
/*
 * ARMv7 Cortex-A9 specific event types
 * (implementation-defined encodings; see the Cortex-A9 TRM)
 */
enum armv7_a9_perf_types {
	ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC	= 0x40,
	ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC	= 0x41,
	ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC	= 0x42,

	ARMV7_PERFCTR_COHERENT_LINE_MISS	= 0x50,
	ARMV7_PERFCTR_COHERENT_LINE_HIT		= 0x51,

	ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES	= 0x60,
	ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES	= 0x61,
	ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES	= 0x62,
	ARMV7_PERFCTR_STREX_EXECUTED_PASSED	= 0x63,
	ARMV7_PERFCTR_STREX_EXECUTED_FAILED	= 0x64,
	ARMV7_PERFCTR_DATA_EVICTION		= 0x65,
	ARMV7_PERFCTR_ISSUE_STAGE_NO_INST	= 0x66,
	ARMV7_PERFCTR_ISSUE_STAGE_EMPTY		= 0x67,
	ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE	= 0x68,

	ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS	= 0x6E,

	ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST	= 0x70,
	ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST	= 0x71,
	ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST	= 0x72,
	ARMV7_PERFCTR_FP_EXECUTED_INST		= 0x73,
	ARMV7_PERFCTR_NEON_EXECUTED_INST	= 0x74,

	ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES	= 0x80,
	ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES	= 0x81,
	ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES	= 0x82,
	ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES	= 0x83,
	ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES	= 0x84,
	ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES	= 0x85,
	ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES	= 0x86,

	ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES	= 0x8A,
	ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES	= 0x8B,

	ARMV7_PERFCTR_ISB_INST			= 0x90,
	ARMV7_PERFCTR_DSB_INST			= 0x91,
	ARMV7_PERFCTR_DMB_INST			= 0x92,
	ARMV7_PERFCTR_EXT_INTERRUPTS		= 0x93,

	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED	= 0xA0,
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED	= 0xA1,
	ARMV7_PERFCTR_PLE_FIFO_FLUSH		= 0xA2,
	ARMV7_PERFCTR_PLE_RQST_COMPLETED	= 0xA3,
	ARMV7_PERFCTR_PLE_FIFO_OVERFLOW		= 0xA4,
	ARMV7_PERFCTR_PLE_RQST_PROG		= 0xA5
};
155
/*
 * ARMv7 Cortex-A5 specific event types
 * (implementation-defined encodings; see the Cortex-A5 TRM)
 */
enum armv7_a5_perf_types {
	ARMV7_PERFCTR_IRQ_TAKEN			= 0x86,
	ARMV7_PERFCTR_FIQ_TAKEN			= 0x87,

	ARMV7_PERFCTR_EXT_MEM_RQST		= 0xc0,
	ARMV7_PERFCTR_NC_EXT_MEM_RQST		= 0xc1,
	ARMV7_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP	= 0xc3,
	ARMV7_PERFCTR_ENTER_READ_ALLOC		= 0xc4,
	ARMV7_PERFCTR_READ_ALLOC		= 0xc5,

	ARMV7_PERFCTR_STALL_SB_FULL		= 0xc9,
};
170
/*
 * ARMv7 Cortex-A15 specific event types
 * (implementation-defined encodings; see the Cortex-A15 TRM)
 */
enum armv7_a15_perf_types {
	ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS	= 0x40,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS	= 0x41,
	ARMV7_PERFCTR_L1_DCACHE_READ_REFILL	= 0x42,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL	= 0x43,

	ARMV7_PERFCTR_L1_DTLB_READ_REFILL	= 0x4C,
	ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL	= 0x4D,

	ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS	= 0x50,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS	= 0x51,
	ARMV7_PERFCTR_L2_DCACHE_READ_REFILL	= 0x52,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL	= 0x53,

	ARMV7_PERFCTR_SPEC_PC_WRITE		= 0x76,
};
188
/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_CLOCK_CYCLES,
};
205
/*
 * Cortex-A8 cache map: translates (cache, op, result) triples from the
 * generic perf HW-cache API to Cortex-A8 event numbers.
 */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
316
/*
 * Cortex-A9 HW events mapping
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    =
					ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = ARMV7_PERFCTR_COHERENT_LINE_HIT,
	[PERF_COUNT_HW_CACHE_MISSES]	    = ARMV7_PERFCTR_COHERENT_LINE_MISS,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_CLOCK_CYCLES,
};
330
/*
 * Cortex-A9 cache map: translates (cache, op, result) triples from the
 * generic perf HW-cache API to Cortex-A9 event numbers.
 */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
441
/*
 * Cortex-A5 HW events mapping
 */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = HW_OP_UNSUPPORTED,
};
454
455 static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
456                                         [PERF_COUNT_HW_CACHE_OP_MAX]
457                                         [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
458         [C(L1D)] = {
459                 [C(OP_READ)] = {
460                         [C(RESULT_ACCESS)]
461                                         = ARMV7_PERFCTR_DCACHE_ACCESS,
462                         [C(RESULT_MISS)]
463                                         = ARMV7_PERFCTR_DCACHE_REFILL,
464                 },
465                 [C(OP_WRITE)] = {
466                         [C(RESULT_ACCESS)]
467                                         = ARMV7_PERFCTR_DCACHE_ACCESS,
468                         [C(RESULT_MISS)]
469                                         = ARMV7_PERFCTR_DCACHE_REFILL,
470                 },
471                 [C(OP_PREFETCH)] = {
472                         [C(RESULT_ACCESS)]
473                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL,
474                         [C(RESULT_MISS)]
475                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
476                 },
477         },
478         [C(L1I)] = {
479                 [C(OP_READ)] = {
480                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
481                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
482                 },
483                 [C(OP_WRITE)] = {
484                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
485                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
486                 },
487                 /*
488                  * The prefetch counters don't differentiate between the I
489                  * side and the D side.
490                  */
491                 [C(OP_PREFETCH)] = {
492                         [C(RESULT_ACCESS)]
493                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL,
494                         [C(RESULT_MISS)]
495                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
496                 },
497         },
498         [C(LL)] = {
499                 [C(OP_READ)] = {
500                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
501                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
502                 },
503                 [C(OP_WRITE)] = {
504                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
505                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
506                 },
507                 [C(OP_PREFETCH)] = {
508                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
509                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
510                 },
511         },
512         [C(DTLB)] = {
513                 [C(OP_READ)] = {
514                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
515                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
516                 },
517                 [C(OP_WRITE)] = {
518                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
519                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
520                 },
521                 [C(OP_PREFETCH)] = {
522                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
523                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
524                 },
525         },
526         [C(ITLB)] = {
527                 [C(OP_READ)] = {
528                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
529                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
530                 },
531                 [C(OP_WRITE)] = {
532                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
533                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
534                 },
535                 [C(OP_PREFETCH)] = {
536                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
537                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
538                 },
539         },
540         [C(BPU)] = {
541                 [C(OP_READ)] = {
542                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
543                         [C(RESULT_MISS)]
544                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
545                 },
546                 [C(OP_WRITE)] = {
547                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
548                         [C(RESULT_MISS)]
549                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
550                 },
551                 [C(OP_PREFETCH)] = {
552                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
553                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
554                 },
555         },
556 };
557
/*
 * Cortex-A15 HW events mapping
 */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_SPEC_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_BUS_CYCLES,
};
570
571 static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
572                                         [PERF_COUNT_HW_CACHE_OP_MAX]
573                                         [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
574         [C(L1D)] = {
575                 [C(OP_READ)] = {
576                         [C(RESULT_ACCESS)]
577                                         = ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
578                         [C(RESULT_MISS)]
579                                         = ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
580                 },
581                 [C(OP_WRITE)] = {
582                         [C(RESULT_ACCESS)]
583                                         = ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
584                         [C(RESULT_MISS)]
585                                         = ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
586                 },
587                 [C(OP_PREFETCH)] = {
588                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
589                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
590                 },
591         },
592         [C(L1I)] = {
593                 /*
594                  * Not all performance counters differentiate between read
595                  * and write accesses/misses so we're not always strictly
596                  * correct, but it's the best we can do. Writes and reads get
597                  * combined in these cases.
598                  */
599                 [C(OP_READ)] = {
600                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
601                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
602                 },
603                 [C(OP_WRITE)] = {
604                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
605                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
606                 },
607                 [C(OP_PREFETCH)] = {
608                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
609                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
610                 },
611         },
612         [C(LL)] = {
613                 [C(OP_READ)] = {
614                         [C(RESULT_ACCESS)]
615                                         = ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
616                         [C(RESULT_MISS)]
617                                         = ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
618                 },
619                 [C(OP_WRITE)] = {
620                         [C(RESULT_ACCESS)]
621                                         = ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
622                         [C(RESULT_MISS)]
623                                         = ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
624                 },
625                 [C(OP_PREFETCH)] = {
626                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
627                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
628                 },
629         },
630         [C(DTLB)] = {
631                 [C(OP_READ)] = {
632                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
633                         [C(RESULT_MISS)]
634                                         = ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
635                 },
636                 [C(OP_WRITE)] = {
637                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
638                         [C(RESULT_MISS)]
639                                         = ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
640                 },
641                 [C(OP_PREFETCH)] = {
642                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
643                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
644                 },
645         },
646         [C(ITLB)] = {
647                 [C(OP_READ)] = {
648                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
649                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
650                 },
651                 [C(OP_WRITE)] = {
652                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
653                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
654                 },
655                 [C(OP_PREFETCH)] = {
656                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
657                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
658                 },
659         },
660         [C(BPU)] = {
661                 [C(OP_READ)] = {
662                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
663                         [C(RESULT_MISS)]
664                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
665                 },
666                 [C(OP_WRITE)] = {
667                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
668                         [C(RESULT_MISS)]
669                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
670                 },
671                 [C(OP_PREFETCH)] = {
672                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
673                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
674                 },
675         },
676 };
677
678 /*
679  * Perf Events' indices
680  */
681 #define ARMV7_IDX_CYCLE_COUNTER 0
682 #define ARMV7_IDX_COUNTER0      1
683 #define ARMV7_IDX_COUNTER_LAST  (ARMV7_IDX_CYCLE_COUNTER + armpmu->num_events - 1)
684
685 #define ARMV7_MAX_COUNTERS      32
686 #define ARMV7_COUNTER_MASK      (ARMV7_MAX_COUNTERS - 1)
687
688 /*
689  * ARMv7 low level PMNC access
690  */
691
692 /*
693  * Perf Event to low level counters mapping
694  */
695 #define ARMV7_IDX_TO_COUNTER(x) \
696         (((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
697
698 /*
699  * Per-CPU PMNC: config reg
700  */
701 #define ARMV7_PMNC_E            (1 << 0) /* Enable all counters */
702 #define ARMV7_PMNC_P            (1 << 1) /* Reset all counters */
703 #define ARMV7_PMNC_C            (1 << 2) /* Cycle counter reset */
704 #define ARMV7_PMNC_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
705 #define ARMV7_PMNC_X            (1 << 4) /* Export to ETM */
706 #define ARMV7_PMNC_DP           (1 << 5) /* Disable CCNT if non-invasive debug*/
707 #define ARMV7_PMNC_N_SHIFT      11       /* Number of counters supported */
708 #define ARMV7_PMNC_N_MASK       0x1f
709 #define ARMV7_PMNC_MASK         0x3f     /* Mask for writable bits */
710
711 /*
712  * EVTSEL: Event selection reg
713  */
714 #define ARMV7_EVTSEL_MASK       0xff            /* Mask for writable bits */
715
716 /*
717  * FLAG: counters overflow flag status reg
718  */
719 #define ARMV7_FLAG_MASK         0xffffffff      /* Mask for writable bits */
720 #define ARMV7_OVERFLOWED_MASK   ARMV7_FLAG_MASK
721
722 static inline u32 armv7_pmnc_read(void)
723 {
724         u32 val;
725         asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
726         return val;
727 }
728
/*
 * Write the PMNC control register.  Only the architecturally writable
 * bits are set; the isb() synchronizes the pipeline before the control
 * change takes effect.
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
735
/* Non-zero iff any counter overflow flag is set in the FLAG snapshot. */
static inline int armv7_pmnc_has_overflowed(u32 pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}
740
741 static inline int armv7_pmnc_counter_valid(int idx)
742 {
743         return idx >= ARMV7_IDX_CYCLE_COUNTER && idx <= ARMV7_IDX_COUNTER_LAST;
744 }
745
746 static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
747 {
748         int ret = 0;
749         u32 counter;
750
751         if (!armv7_pmnc_counter_valid(idx)) {
752                 pr_err("CPU%u checking wrong counter %d overflow status\n",
753                         smp_processor_id(), idx);
754         } else {
755                 counter = ARMV7_IDX_TO_COUNTER(idx);
756                 ret = pmnc & BIT(counter);
757         }
758
759         return ret;
760 }
761
/*
 * Select event counter @idx via PMSELR so that subsequent PMXEVCNTR /
 * PMXEVTYPER accesses target it.  Returns @idx on success, -EINVAL for
 * an out-of-range index.
 */
static inline int armv7_pmnc_select_counter(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u selecting wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	/* Make sure the selection is visible before the next cp15 access. */
	isb();

	return idx;
}
778
779 static inline u32 armv7pmu_read_counter(int idx)
780 {
781         u32 value = 0;
782
783         if (!armv7_pmnc_counter_valid(idx))
784                 pr_err("CPU%u reading wrong counter %d\n",
785                         smp_processor_id(), idx);
786         else if (idx == ARMV7_IDX_CYCLE_COUNTER)
787                 asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
788         else if (armv7_pmnc_select_counter(idx) == idx)
789                 asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
790
791         return value;
792 }
793
794 static inline void armv7pmu_write_counter(int idx, u32 value)
795 {
796         if (!armv7_pmnc_counter_valid(idx))
797                 pr_err("CPU%u writing wrong counter %d\n",
798                         smp_processor_id(), idx);
799         else if (idx == ARMV7_IDX_CYCLE_COUNTER)
800                 asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
801         else if (armv7_pmnc_select_counter(idx) == idx)
802                 asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
803 }
804
805 static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
806 {
807         if (armv7_pmnc_select_counter(idx) == idx) {
808                 val &= ARMV7_EVTSEL_MASK;
809                 asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
810         }
811 }
812
813 static inline int armv7_pmnc_enable_counter(int idx)
814 {
815         u32 counter;
816
817         if (!armv7_pmnc_counter_valid(idx)) {
818                 pr_err("CPU%u enabling wrong PMNC counter %d\n",
819                         smp_processor_id(), idx);
820                 return -EINVAL;
821         }
822
823         counter = ARMV7_IDX_TO_COUNTER(idx);
824         asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
825         return idx;
826 }
827
828 static inline int armv7_pmnc_disable_counter(int idx)
829 {
830         u32 counter;
831
832         if (!armv7_pmnc_counter_valid(idx)) {
833                 pr_err("CPU%u disabling wrong PMNC counter %d\n",
834                         smp_processor_id(), idx);
835                 return -EINVAL;
836         }
837
838         counter = ARMV7_IDX_TO_COUNTER(idx);
839         asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
840         return idx;
841 }
842
843 static inline int armv7_pmnc_enable_intens(int idx)
844 {
845         u32 counter;
846
847         if (!armv7_pmnc_counter_valid(idx)) {
848                 pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
849                         smp_processor_id(), idx);
850                 return -EINVAL;
851         }
852
853         counter = ARMV7_IDX_TO_COUNTER(idx);
854         asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
855         return idx;
856 }
857
858 static inline int armv7_pmnc_disable_intens(int idx)
859 {
860         u32 counter;
861
862         if (!armv7_pmnc_counter_valid(idx)) {
863                 pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
864                         smp_processor_id(), idx);
865                 return -EINVAL;
866         }
867
868         counter = ARMV7_IDX_TO_COUNTER(idx);
869         asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
870         return idx;
871 }
872
/*
 * Read the counter overflow FLAG status register and acknowledge all
 * pending overflows by writing the set bits back (write-one-to-clear).
 * Returns the flags as read.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
886
887 #ifdef DEBUG
/*
 * Dump the PMU register state (control, enables, flags, selection,
 * cycle counter, and every event counter with its event selection) to
 * the kernel log.  Debug-only helper; clobbers the PMSELR selection.
 */
static void armv7_pmnc_dump_regs(void)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	/* Walk the event counters, printing the hardware counter number. */
	for (cnt = ARMV7_IDX_COUNTER0; cnt <= ARMV7_IDX_COUNTER_LAST; cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
923 #endif
924
/*
 * Program and enable counter @idx for the event described by @hwc.
 * The whole sequence runs under pmu_lock with IRQs off so the PMU
 * state cannot be modified concurrently.
 */
static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter while we reprogram it.
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't need to set the event if it's a cycle count
	 */
	if (idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable overflow interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
959
/*
 * Stop counter @idx: disable both the counter itself and its overflow
 * interrupt, atomically with respect to other PMU updates.
 */
static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
981
/*
 * PMU overflow interrupt handler.  One IRQ is shared by all counters:
 * read-and-clear the overflow flags, then for every active counter
 * that overflowed, update its perf event and restart its period.
 * Returns IRQ_NONE when no counter had actually overflowed.
 */
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct cpu_hw_events *cpuc;
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags (write-one-to-clear acknowledge).
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	perf_sample_data_init(&data, 0);

	cpuc = &__get_cpu_var(cpu_hw_events);
	for (idx = 0; idx < armpmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Skip counters that have no active event attached. */
		if (!test_bit(idx, cpuc->active_mask))
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event, hwc, idx, 1);
		data.period = event->hw.last_period;
		/* Re-arm the sample period; skip if the event is now done. */
		if (!armpmu_event_set_period(event, hwc, idx))
			continue;

		/* Throttled or over limit: stop this counter. */
		if (perf_event_overflow(event, &data, regs))
			armpmu->disable(hwc, idx);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1044
/* Globally enable the PMU by setting the E bit in PMNC. */
static void armv7pmu_start(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
1054
/* Globally disable the PMU by clearing the E bit in PMNC. */
static void armv7pmu_stop(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
1064
1065 static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
1066                                   struct hw_perf_event *event)
1067 {
1068         int idx;
1069
1070         /* Always place a cycle counter into the cycle counter. */
1071         if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
1072                 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
1073                         return -EAGAIN;
1074
1075                 return ARMV7_IDX_CYCLE_COUNTER;
1076         }
1077
1078         /*
1079          * For anything other than a cycle counter, try and use
1080          * the events counters
1081          */
1082         for (idx = ARMV7_IDX_COUNTER0; idx < armpmu->num_events; ++idx) {
1083                 if (!test_and_set_bit(idx, cpuc->used_mask))
1084                         return idx;
1085         }
1086
1087         /* The counters are all in use. */
1088         return -EAGAIN;
1089 }
1090
1091 static void armv7pmu_reset(void *info)
1092 {
1093         u32 idx, nb_cnt = armpmu->num_events;
1094
1095         /* The counter and interrupt enable registers are unknown at reset. */
1096         for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx)
1097                 armv7pmu_disable_event(NULL, idx);
1098
1099         /* Initialize & Reset PMNC: C and P bits */
1100         armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
1101 }
1102
/*
 * Shared arm_pmu descriptor for all ARMv7 implementations; the
 * per-CPU-type init functions below fill in id/name/maps/num_events.
 */
static struct arm_pmu armv7pmu = {
	.handle_irq		= armv7pmu_handle_irq,
	.enable			= armv7pmu_enable_event,
	.disable		= armv7pmu_disable_event,
	.read_counter		= armv7pmu_read_counter,
	.write_counter		= armv7pmu_write_counter,
	.get_event_idx		= armv7pmu_get_event_idx,
	.start			= armv7pmu_start,
	.stop			= armv7pmu_stop,
	.reset			= armv7pmu_reset,
	.raw_event_mask		= 0xFF,	/* EVTSEL event numbers are 8 bits */
	.max_period		= (1LLU << 32) - 1,	/* counters are 32 bit */
};
1116
1117 static u32 __init armv7_read_num_pmnc_events(void)
1118 {
1119         u32 nb_cnt;
1120
1121         /* Read the nb of CNTx counters supported from PMNC */
1122         nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;
1123
1124         /* Add the CPU cycles counter and return */
1125         return nb_cnt + 1;
1126 }
1127
1128 static struct arm_pmu *__init armv7_a8_pmu_init(void)
1129 {
1130         armv7pmu.id             = ARM_PERF_PMU_ID_CA8;
1131         armv7pmu.name           = "ARMv7 Cortex-A8";
1132         armv7pmu.cache_map      = &armv7_a8_perf_cache_map;
1133         armv7pmu.event_map      = &armv7_a8_perf_map;
1134         armv7pmu.num_events     = armv7_read_num_pmnc_events();
1135         return &armv7pmu;
1136 }
1137
1138 static struct arm_pmu *__init armv7_a9_pmu_init(void)
1139 {
1140         armv7pmu.id             = ARM_PERF_PMU_ID_CA9;
1141         armv7pmu.name           = "ARMv7 Cortex-A9";
1142         armv7pmu.cache_map      = &armv7_a9_perf_cache_map;
1143         armv7pmu.event_map      = &armv7_a9_perf_map;
1144         armv7pmu.num_events     = armv7_read_num_pmnc_events();
1145         return &armv7pmu;
1146 }
1147
1148 static struct arm_pmu *__init armv7_a5_pmu_init(void)
1149 {
1150         armv7pmu.id             = ARM_PERF_PMU_ID_CA5;
1151         armv7pmu.name           = "ARMv7 Cortex-A5";
1152         armv7pmu.cache_map      = &armv7_a5_perf_cache_map;
1153         armv7pmu.event_map      = &armv7_a5_perf_map;
1154         armv7pmu.num_events     = armv7_read_num_pmnc_events();
1155         return &armv7pmu;
1156 }
1157
1158 static struct arm_pmu *__init armv7_a15_pmu_init(void)
1159 {
1160         armv7pmu.id             = ARM_PERF_PMU_ID_CA15;
1161         armv7pmu.name           = "ARMv7 Cortex-A15";
1162         armv7pmu.cache_map      = &armv7_a15_perf_cache_map;
1163         armv7pmu.event_map      = &armv7_a15_perf_map;
1164         armv7pmu.num_events     = armv7_read_num_pmnc_events();
1165         return &armv7pmu;
1166 }
1167 #else
/* !CONFIG_CPU_V7: no ARMv7 PMU support; every probe returns NULL. */
static struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	return NULL;
}
1187 #endif  /* CONFIG_CPU_V7 */