/*
 * ARM: rockchip: rk3228: implement function rk3228_restart
 * [firefly-linux-kernel-4.4.55.git] / arch/arm/kernel/perf_event_v7.c
 */
/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 *  a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The cycle
 *  counter and all 4 performance counters together can be reset separately.
 */
18
#ifdef CONFIG_CPU_V7

/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
	/* Events 0x00-0x12 are common across ARMv7 implementations. */
	ARMV7_PERFCTR_PMNC_SW_INCR			= 0x00,
	ARMV7_PERFCTR_L1_ICACHE_REFILL			= 0x01,
	ARMV7_PERFCTR_ITLB_REFILL			= 0x02,
	ARMV7_PERFCTR_L1_DCACHE_REFILL			= 0x03,
	ARMV7_PERFCTR_L1_DCACHE_ACCESS			= 0x04,
	ARMV7_PERFCTR_DTLB_REFILL			= 0x05,
	ARMV7_PERFCTR_MEM_READ				= 0x06,
	ARMV7_PERFCTR_MEM_WRITE				= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED			= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN				= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED			= 0x0A,
	ARMV7_PERFCTR_CID_WRITE				= 0x0B,

	/*
	 * ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all (taken) branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE				= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH			= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN			= 0x0E,
	ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS		= 0x0F,
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED		= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES			= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED			= 0x12,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_MEM_ACCESS			= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS			= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB			= 0x15,
	ARMV7_PERFCTR_L2_CACHE_ACCESS			= 0x16,
	ARMV7_PERFCTR_L2_CACHE_REFILL			= 0x17,
	ARMV7_PERFCTR_L2_CACHE_WB			= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS			= 0x19,
	ARMV7_PERFCTR_MEM_ERROR				= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC			= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE			= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES			= 0x1D,

	/* Used by the event maps below for PERF_COUNT_HW_CPU_CYCLES. */
	ARMV7_PERFCTR_CPU_CYCLES			= 0xFF
};
72
/* ARMv7 Cortex-A8 specific event types (implementation-defined numbers). */
enum armv7_a8_perf_types {
	ARMV7_A8_PERFCTR_L2_CACHE_ACCESS		= 0x43,
	ARMV7_A8_PERFCTR_L2_CACHE_REFILL		= 0x44,
	ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS		= 0x50,
	ARMV7_A8_PERFCTR_STALL_ISIDE			= 0x56,
};
80
/* ARMv7 Cortex-A9 specific event types (implementation-defined numbers). */
enum armv7_a9_perf_types {
	ARMV7_A9_PERFCTR_INSTR_CORE_RENAME		= 0x68,
	ARMV7_A9_PERFCTR_STALL_ICACHE			= 0x60,
	ARMV7_A9_PERFCTR_STALL_DISPATCH			= 0x66,
};
87
/* ARMv7 Cortex-A5 specific event types (implementation-defined numbers). */
enum armv7_a5_perf_types {
	ARMV7_A5_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP		= 0xc3,
};
93
/*
 * ARMv7 Cortex-A15 specific event types (implementation-defined numbers).
 * Unlike the generic events, the A15 provides separate read/write variants
 * for L1D/L2 cache and L1 DTLB events.
 */
enum armv7_a15_perf_types {
	ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ		= 0x40,
	ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE	= 0x41,
	ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ		= 0x42,
	ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE	= 0x43,

	ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ		= 0x4C,
	ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE		= 0x4D,

	ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ		= 0x50,
	ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE		= 0x51,
	ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ		= 0x52,
	ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE		= 0x53,

	/* Speculative PC-write event; used for HW_BRANCH_INSTRUCTIONS below. */
	ARMV7_A15_PERFCTR_PC_WRITE_SPEC			= 0x76,
};
111
/* ARMv7 Cortex-A12 specific event types (implementation-defined numbers). */
enum armv7_a12_perf_types {
	ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ		= 0x40,
	ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE	= 0x41,

	ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ		= 0x50,
	ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE		= 0x51,

	/* Speculative PC-write event (branch instructions). */
	ARMV7_A12_PERFCTR_PC_WRITE_SPEC			= 0x76,

	/* Prefetcher TLB refill. */
	ARMV7_A12_PERFCTR_PF_TLB_REFILL			= 0xe7,
};
124
/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 *
 * Maps each generic PERF_COUNT_HW_* event onto an A8 event number;
 * events with no usable counter map to HW_OP_UNSUPPORTED.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A8_PERFCTR_STALL_ISIDE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= HW_OP_UNSUPPORTED,
};
143
/*
 * Generic hardware cache event -> Cortex-A8 event number mapping.
 * Indexed by [cache][operation][result]; entries set to
 * CACHE_OP_UNSUPPORTED cannot be counted on this CPU.
 */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* Last-level cache: the A8-specific L2 events. */
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* TLBs: only refills (misses) are countable, not accesses. */
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* No NUMA node cache events are supported. */
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
252
/*
 * Cortex-A9 HW events mapping
 *
 * Maps each generic PERF_COUNT_HW_* event onto an A9 event number;
 * note that HW_INSTRUCTIONS uses the A9-specific core-rename event
 * rather than the common INSTR_EXECUTED event.
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A9_PERFCTR_STALL_ICACHE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= ARMV7_A9_PERFCTR_STALL_DISPATCH,
};
267
/*
 * Generic hardware cache event -> Cortex-A9 event number mapping.
 * Indexed by [cache][operation][result]; entries set to
 * CACHE_OP_UNSUPPORTED cannot be counted on this CPU.
 */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* L1 instruction cache: only refills are countable on the A9. */
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* No last-level (L2) cache events are supported. */
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* TLBs: only refills (misses) are countable, not accesses. */
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* No NUMA node cache events are supported. */
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
376
/*
 * Cortex-A5 HW events mapping
 *
 * Maps each generic PERF_COUNT_HW_* event onto an A5 event number;
 * stalled-cycle and bus-cycle events are not supported on this CPU.
 */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= HW_OP_UNSUPPORTED,
};
391
/*
 * Generic hardware cache event -> Cortex-A5 event number mapping.
 * Indexed by [cache][operation][result]; entries set to
 * CACHE_OP_UNSUPPORTED cannot be counted on this CPU.
 * The A5 is the only CPU here with prefetch linefill events.
 */
static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		/*
		 * The prefetch counters don't differentiate between the I
		 * side and the D side.
		 */
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	/* No last-level (L2) cache events are supported. */
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* TLBs: only refills (misses) are countable, not accesses. */
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* No NUMA node cache events are supported. */
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
498
/*
 * Cortex-A15 HW events mapping
 *
 * Maps each generic PERF_COUNT_HW_* event onto an A15 event number.
 * Branch instructions use the A15 speculative PC-write event, and
 * bus cycles are supported via the common PMUv2 event.
 */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= HW_OP_UNSUPPORTED,
};
513
/*
 * Generic hardware cache event -> Cortex-A15 event number mapping.
 * Indexed by [cache][operation][result]. The A15 provides separate
 * read/write events for L1D, L2 and DTLB; entries set to
 * CACHE_OP_UNSUPPORTED cannot be counted on this CPU.
 */
static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/*
		 * Not all performance counters differentiate between read
		 * and write accesses/misses so we're not always strictly
		 * correct, but it's the best we can do. Writes and reads get
		 * combined in these cases.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* Last-level cache: the A15-specific read/write L2 events. */
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* DTLB: only L1 refills are countable, with read/write variants. */
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	/* No NUMA node cache events are supported. */
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
622
/*
 * Cortex-A7 HW events mapping
 *
 * Maps each generic PERF_COUNT_HW_* event onto an A7 event number;
 * bus cycles are supported via the common PMUv2 event, stalled-cycle
 * events are not supported.
 */
static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= HW_OP_UNSUPPORTED,
};
637
/*
 * Generic perf cache events -> Cortex-A7 PMU event numbers.
 * Entries marked CACHE_OP_UNSUPPORTED have no A7 counter.
 */
static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		/* Only TLB refills are countable; accesses are not. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
746
/*
 * Cortex-A12 HW events mapping
 *
 * Generic perf hardware events -> Cortex-A12 PMU event numbers.
 * Branch instructions use the A12-specific speculative PC-write event.
 */
static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= HW_OP_UNSUPPORTED,
};
761
/*
 * Generic perf cache events -> Cortex-A12 PMU event numbers.  Unlike
 * the A7 map, L1D and L2 accesses can be split by read/write, and DTLB
 * prefetch misses are countable via the prefetch TLB refill event.
 */
static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/*
		 * Not all performance counters differentiate between read
		 * and write accesses/misses so we're not always strictly
		 * correct, but it's the best we can do. Writes and reads get
		 * combined in these cases.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_A12_PERFCTR_PF_TLB_REFILL,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
870
/*
 * Perf Events' indices
 *
 * Perf index 0 is the dedicated cycle counter; the configurable PMNx
 * event counters follow from index 1.
 */
#define ARMV7_IDX_CYCLE_COUNTER	0
#define ARMV7_IDX_COUNTER0	1
#define ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV7_MAX_COUNTERS	32
#define ARMV7_COUNTER_MASK	(ARMV7_MAX_COUNTERS - 1)

/*
 * ARMv7 low level PMNC access
 */

/*
 * Perf Event to low level counters mapping: hardware counter N sits at
 * perf index N + 1 (index 0 being the cycle counter), wrapped to the
 * 32-counter architectural limit.
 */
#define ARMV7_IDX_TO_COUNTER(x)	\
	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)

/*
 * Per-CPU PMNC: config reg
 */
#define ARMV7_PMNC_E		(1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P		(1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C		(1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D		(1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X		(1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP		(1 << 5) /* Disable CCNT if non-invasive debug*/
#define ARMV7_PMNC_N_SHIFT	11	 /* Number of counters supported */
#define ARMV7_PMNC_N_MASK	0x1f
#define ARMV7_PMNC_MASK		0x3f	 /* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg
 */
#define ARMV7_FLAG_MASK		0xffffffff	/* Mask for writable bits */
#define ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define ARMV7_EVTYPE_MASK	0xc80000ff	/* Mask for writable bits */
#define ARMV7_EVTYPE_EVENT	0xff		/* Mask for EVENT bits */

/*
 * Event filters for PMUv2 (top bits of the event type register)
 */
#define ARMV7_EXCLUDE_PL1	(1 << 31)
#define ARMV7_EXCLUDE_USER	(1 << 30)
#define ARMV7_INCLUDE_HYP	(1 << 27)
923
/* Read the PMNC (performance monitors control) register. */
static inline u32 armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}
930
/*
 * Write @val to PMNC, masked to the architecturally writable bits.
 * The isb() makes preceding PMU register writes take effect before
 * the control register changes (e.g. before counters get enabled).
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
937
/* Non-zero if any overflow flag is set in @pmnc (a saved flags value). */
static inline int armv7_pmnc_has_overflowed(u32 pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}
942
943 static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
944 {
945         return idx >= ARMV7_IDX_CYCLE_COUNTER &&
946                 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
947 }
948
/* Non-zero if the hardware counter behind perf index @idx overflowed. */
static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
}
953
/*
 * Select the hardware counter behind perf index @idx so that the
 * following PMXEVTYPER/PMXEVCNTR accesses target it.  The isb()
 * ensures the selection is in effect before those accesses.
 * Returns @idx unchanged so callers can test for success.
 */
static inline int armv7_pmnc_select_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();

	return idx;
}
962
/*
 * Read the current value of @event's hardware counter.  The cycle
 * counter is read directly (c9, c13, 0); event counters are read via
 * the select/read-indirect pair.  Returns 0 on an invalid index.
 */
static inline u32 armv7pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));

	return value;
}
980
/*
 * Write @value into @event's hardware counter (used when programming
 * a new sample period).  Mirrors armv7pmu_read_counter(): the cycle
 * counter is written directly, event counters via select + indirect
 * write.  Invalid indices are logged and ignored.
 */
static inline void armv7pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
}
995
/*
 * Program the event type register of the counter behind @idx with
 * @val, masked to the writable bits (event number + PMUv2 filters).
 */
static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTYPE_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}
1003
/* Set the counter-enable bit for perf index @idx; returns @idx. */
static inline int armv7_pmnc_enable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
	return idx;
}
1010
/* Clear the counter-enable bit for perf index @idx; returns @idx. */
static inline int armv7_pmnc_disable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
	return idx;
}
1017
/* Unmask the overflow interrupt for perf index @idx; returns @idx. */
static inline int armv7_pmnc_enable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
	return idx;
}
1024
/*
 * Mask the overflow interrupt for perf index @idx and clear any
 * already-latched overflow flag, so no stale interrupt fires once the
 * counter is reused.  Returns @idx.
 */
static inline int armv7_pmnc_disable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
	isb();

	return idx;
}
1036
/*
 * Atomically fetch and clear the counter overflow flags.  The flag
 * register is write-one-to-clear, so writing back the value we read
 * acknowledges exactly the overflows we are about to handle.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
1050
#ifdef DEBUG
/* Dump the raw PMU register state to the kernel log (debug builds only). */
static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	/* Select each event counter in turn to read its count and type. */
	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
#endif
1089
1090 static void armv7pmu_save_regs(struct arm_pmu *cpu_pmu,
1091                                         struct cpupmu_regs *regs)
1092 {
1093         unsigned int cnt;
1094         asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (regs->pmc));
1095         if (!(regs->pmc & ARMV7_PMNC_E))
1096                 return;
1097
1098         asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (regs->pmcntenset));
1099         asm volatile("mrc p15, 0, %0, c9, c14, 0" : "=r" (regs->pmuseren));
1100         asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (regs->pmintenset));
1101         asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (regs->pmxevtcnt[0]));
1102         for (cnt = ARMV7_IDX_COUNTER0;
1103                         cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
1104                 armv7_pmnc_select_counter(cnt);
1105                 asm volatile("mrc p15, 0, %0, c9, c13, 1"
1106                                         : "=r"(regs->pmxevttype[cnt]));
1107                 asm volatile("mrc p15, 0, %0, c9, c13, 2"
1108                                         : "=r"(regs->pmxevtcnt[cnt]));
1109         }
1110         return;
1111 }
1112
/*
 * Reinstate the PMU state captured by armv7pmu_save_regs().  A no-op
 * if the PMU was disabled (PMNC.E clear) at save time.  PMNC itself is
 * written last so the counters only restart once every other register
 * has been reloaded.
 */
static void armv7pmu_restore_regs(struct arm_pmu *cpu_pmu,
					struct cpupmu_regs *regs)
{
	unsigned int cnt;
	if (!(regs->pmc & ARMV7_PMNC_E))
		return;

	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (regs->pmcntenset));
	asm volatile("mcr p15, 0, %0, c9, c14, 0" : : "r" (regs->pmuseren));
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (regs->pmintenset));
	/* Slot 0 holds the cycle counter value. */
	asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (regs->pmxevtcnt[0]));
	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mcr p15, 0, %0, c9, c13, 1"
					: : "r"(regs->pmxevttype[cnt]));
		asm volatile("mcr p15, 0, %0, c9, c13, 2"
					: : "r"(regs->pmxevtcnt[cnt]));
	}
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r" (regs->pmc));
}
1134
1135 static void armv7pmu_enable_event(struct perf_event *event)
1136 {
1137         unsigned long flags;
1138         struct hw_perf_event *hwc = &event->hw;
1139         struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
1140         struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1141         int idx = hwc->idx;
1142
1143         if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
1144                 pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
1145                         smp_processor_id(), idx);
1146                 return;
1147         }
1148
1149         /*
1150          * Enable counter and interrupt, and set the counter to count
1151          * the event that we're interested in.
1152          */
1153         raw_spin_lock_irqsave(&events->pmu_lock, flags);
1154
1155         /*
1156          * Disable counter
1157          */
1158         armv7_pmnc_disable_counter(idx);
1159
1160         /*
1161          * Set event (if destined for PMNx counters)
1162          * We only need to set the event for the cycle counter if we
1163          * have the ability to perform event filtering.
1164          */
1165         if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
1166                 armv7_pmnc_write_evtsel(idx, hwc->config_base);
1167
1168         /*
1169          * Enable interrupt for this counter
1170          */
1171         armv7_pmnc_enable_intens(idx);
1172
1173         /*
1174          * Enable counter
1175          */
1176         armv7_pmnc_enable_counter(idx);
1177
1178         raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1179 }
1180
1181 static void armv7pmu_disable_event(struct perf_event *event)
1182 {
1183         unsigned long flags;
1184         struct hw_perf_event *hwc = &event->hw;
1185         struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
1186         struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1187         int idx = hwc->idx;
1188
1189         if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
1190                 pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
1191                         smp_processor_id(), idx);
1192                 return;
1193         }
1194
1195         /*
1196          * Disable counter and interrupt
1197          */
1198         raw_spin_lock_irqsave(&events->pmu_lock, flags);
1199
1200         /*
1201          * Disable counter
1202          */
1203         armv7_pmnc_disable_counter(idx);
1204
1205         /*
1206          * Disable interrupt for this counter
1207          */
1208         armv7_pmnc_disable_intens(idx);
1209
1210         raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1211 }
1212
/*
 * PMU overflow interrupt handler.  Fetches and clears the overflow
 * flags, then for every active event whose counter overflowed updates
 * its count, restarts the sample period and hands a sample to the
 * perf core.  Returns IRQ_NONE if no counter had actually overflowed
 * (shared interrupt line).
 */
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = cpu_pmu->get_hw_events();
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		/* Reprogram the period; skip sampling if the event is over. */
		if (!armpmu_event_set_period(event))
			continue;

		/* Throttling may ask us to stop the event entirely. */
		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1274
1275 static void armv7pmu_start(struct arm_pmu *cpu_pmu)
1276 {
1277         unsigned long flags;
1278         struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1279
1280         raw_spin_lock_irqsave(&events->pmu_lock, flags);
1281         /* Enable all counters */
1282         armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
1283         raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1284 }
1285
1286 static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
1287 {
1288         unsigned long flags;
1289         struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1290
1291         raw_spin_lock_irqsave(&events->pmu_lock, flags);
1292         /* Disable all counters */
1293         armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
1294         raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1295 }
1296
1297 static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
1298                                   struct perf_event *event)
1299 {
1300         int idx;
1301         struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
1302         struct hw_perf_event *hwc = &event->hw;
1303         unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;
1304
1305         /* Always place a cycle counter into the cycle counter. */
1306         if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
1307                 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
1308                         return -EAGAIN;
1309
1310                 return ARMV7_IDX_CYCLE_COUNTER;
1311         }
1312
1313         /*
1314          * For anything other than a cycle counter, try and use
1315          * the events counters
1316          */
1317         for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
1318                 if (!test_and_set_bit(idx, cpuc->used_mask))
1319                         return idx;
1320         }
1321
1322         /* The counters are all in use. */
1323         return -EAGAIN;
1324 }
1325
1326 /*
1327  * Add an event filter to a given event. This will only work for PMUv2 PMUs.
1328  */
1329 static int armv7pmu_set_event_filter(struct hw_perf_event *event,
1330                                      struct perf_event_attr *attr)
1331 {
1332         unsigned long config_base = 0;
1333
1334         if (attr->exclude_idle)
1335                 return -EPERM;
1336         if (attr->exclude_user)
1337                 config_base |= ARMV7_EXCLUDE_USER;
1338         if (attr->exclude_kernel)
1339                 config_base |= ARMV7_EXCLUDE_PL1;
1340         if (!attr->exclude_hv)
1341                 config_base |= ARMV7_INCLUDE_HYP;
1342
1343         /*
1344          * Install the filter into config_base as this is used to
1345          * construct the event type.
1346          */
1347         event->config_base = config_base;
1348
1349         return 0;
1350 }
1351
1352 static void armv7pmu_reset(void *info)
1353 {
1354         struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
1355         u32 idx, nb_cnt = cpu_pmu->num_events;
1356
1357         /* The counter and interrupt enable registers are unknown at reset. */
1358         for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
1359                 armv7_pmnc_disable_counter(idx);
1360                 armv7_pmnc_disable_intens(idx);
1361         }
1362
1363         /* Initialize & Reset PMNC: C and P bits */
1364         armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
1365 }
1366
/* Map a generic perf event onto the Cortex-A8 PMU encoding. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
1372
/* Map a generic perf event onto the Cortex-A9 PMU encoding. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
1378
/* Map a generic perf event onto the Cortex-A5 PMU encoding. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
1384
/* Map a generic perf event onto the Cortex-A15 PMU encoding. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
1390
/* Map a generic perf event onto the Cortex-A7 PMU encoding. */
static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}
1396
/* Map a generic perf event onto the Cortex-A12 PMU encoding. */
static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}
1402
1403 static void armv7pmu_init(struct arm_pmu *cpu_pmu)
1404 {
1405         cpu_pmu->handle_irq     = armv7pmu_handle_irq;
1406         cpu_pmu->enable         = armv7pmu_enable_event;
1407         cpu_pmu->disable        = armv7pmu_disable_event;
1408         cpu_pmu->read_counter   = armv7pmu_read_counter;
1409         cpu_pmu->write_counter  = armv7pmu_write_counter;
1410         cpu_pmu->get_event_idx  = armv7pmu_get_event_idx;
1411         cpu_pmu->start          = armv7pmu_start;
1412         cpu_pmu->stop           = armv7pmu_stop;
1413         cpu_pmu->reset          = armv7pmu_reset;
1414         cpu_pmu->save_regs      = armv7pmu_save_regs;
1415         cpu_pmu->restore_regs   = armv7pmu_restore_regs;
1416         cpu_pmu->max_period     = (1LLU << 32) - 1;
1417 };
1418
1419 static u32 armv7_read_num_pmnc_events(void)
1420 {
1421         u32 nb_cnt;
1422
1423         /* Read the nb of CNTx counters supported from PMNC */
1424         nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;
1425
1426         /* Add the CPU cycles counter and return */
1427         return nb_cnt + 1;
1428 }
1429
1430 static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
1431 {
1432         armv7pmu_init(cpu_pmu);
1433         cpu_pmu->name           = "ARMv7_Cortex_A8";
1434         cpu_pmu->map_event      = armv7_a8_map_event;
1435         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1436         return 0;
1437 }
1438
1439 static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
1440 {
1441         armv7pmu_init(cpu_pmu);
1442         cpu_pmu->name           = "ARMv7_Cortex_A9";
1443         cpu_pmu->map_event      = armv7_a9_map_event;
1444         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1445         return 0;
1446 }
1447
1448 static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
1449 {
1450         armv7pmu_init(cpu_pmu);
1451         cpu_pmu->name           = "ARMv7_Cortex_A5";
1452         cpu_pmu->map_event      = armv7_a5_map_event;
1453         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1454         return 0;
1455 }
1456
1457 static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
1458 {
1459         armv7pmu_init(cpu_pmu);
1460         cpu_pmu->name           = "ARMv7_Cortex_A15";
1461         cpu_pmu->map_event      = armv7_a15_map_event;
1462         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1463         cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1464         return 0;
1465 }
1466
1467 static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
1468 {
1469         armv7pmu_init(cpu_pmu);
1470         cpu_pmu->name           = "ARMv7_Cortex_A7";
1471         cpu_pmu->map_event      = armv7_a7_map_event;
1472         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1473         cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1474         return 0;
1475 }
1476
1477 static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
1478 {
1479         armv7pmu_init(cpu_pmu);
1480         cpu_pmu->name           = "ARMv7 Cortex-A12";
1481         cpu_pmu->map_event      = armv7_a12_map_event;
1482         cpu_pmu->num_events     = armv7_read_num_pmnc_events();
1483         cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1484         return 0;
1485 }
1486 #else
static inline int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1491
static inline int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1496
static inline int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1501
static inline int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1506
static inline int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1511
static inline int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	/* Stub: ARMv7 PMU support not built in (CONFIG_CPU_V7 unset). */
	return -ENODEV;
}
1516 #endif  /* CONFIG_CPU_V7 */