UPSTREAM: arm64: perf: add format entry to describe event -> config mapping
[firefly-linux-kernel-4.4.55.git] arch/arm64/kernel/perf_event.c
/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR                        0x00
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL                    0x03
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS                    0x04
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED                  0x10
#define ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES                        0x11
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED                      0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED                      0x08
#define ARMV8_PMUV3_PERFCTR_OP_SPEC                             0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_MEM_READ                            0x06
#define ARMV8_PMUV3_PERFCTR_MEM_WRITE                           0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN                           0x09
#define ARMV8_PMUV3_PERFCTR_EXC_EXECUTED                        0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE                           0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE                            0x0C
#define ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH                       0x0D
#define ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN                      0x0E
#define ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS                0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE                          0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN                               0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED                          0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL                    0x01
#define ARMV8_PMUV3_PERFCTR_ITLB_REFILL                         0x02
#define ARMV8_PMUV3_PERFCTR_DTLB_REFILL                         0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS                          0x13
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS                    0x14
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB                        0x15
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS                     0x16
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL                     0x17
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_WB                         0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS                          0x19
#define ARMV8_PMUV3_PERFCTR_MEM_ERROR                           0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES                          0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE                  0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE                  0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED                 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND                      0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND                       0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB                             0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB                             0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE                           0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL                    0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE                  0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL                    0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE                           0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB                        0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL                      0x2D
#define ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL                      0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB                             0x2F
#define ARMV8_PMUV3_PERFCTR_L21_TLB                             0x30

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREFETCH_LINEFILL                     0xC2

/* ARMv8 Cortex-A57 specific event types. */
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD                   0x40
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST                   0x41
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD                   0x42
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST                   0x43
#define ARMV8_A57_PERFCTR_DTLB_REFILL_LD                        0x4c
#define ARMV8_A57_PERFCTR_DTLB_REFILL_ST                        0x4d

/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS]     = ARMV8_PMUV3_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 HW events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                [PERF_COUNT_HW_CACHE_OP_MAX]
                                                [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREFETCH_LINEFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_A57_PERFCTR_DTLB_REFILL_LD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]  = ARMV8_A57_PERFCTR_DTLB_REFILL_ST,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
        PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
                              "event=" ARMV8_EVENT_ATTR_RESOLVE(config))
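
/*
 * How the two-level macro expands (illustrative only, not used by the
 * code): the ARMV8_EVENT_ATTR_RESOLVE() indirection makes the
 * preprocessor expand 'config' before '#' stringizes it, so
 *
 *   ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES)
 *
 * yields the sysfs string "event=0x11" rather than
 * "event=ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES".
 */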

ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_ITLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_DTLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_MEM_READ);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_MEM_WRITE);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_EXECUTED);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEM_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_OP_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l21_tlb_refill, ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l21_tlb, ARMV8_PMUV3_PERFCTR_L21_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_sw_incr.attr.attr,
        &armv8_event_attr_l1i_cache_refill.attr.attr,
        &armv8_event_attr_l1i_tlb_refill.attr.attr,
        &armv8_event_attr_l1d_cache_refill.attr.attr,
        &armv8_event_attr_l1d_cache.attr.attr,
        &armv8_event_attr_l1d_tlb_refill.attr.attr,
        &armv8_event_attr_ld_retired.attr.attr,
        &armv8_event_attr_st_retired.attr.attr,
        &armv8_event_attr_inst_retired.attr.attr,
        &armv8_event_attr_exc_taken.attr.attr,
        &armv8_event_attr_exc_return.attr.attr,
        &armv8_event_attr_cid_write_retired.attr.attr,
        &armv8_event_attr_pc_write_retired.attr.attr,
        &armv8_event_attr_br_immed_retired.attr.attr,
        &armv8_event_attr_br_return_retired.attr.attr,
        &armv8_event_attr_unaligned_ldst_retired.attr.attr,
        &armv8_event_attr_br_mis_pred.attr.attr,
        &armv8_event_attr_cpu_cycles.attr.attr,
        &armv8_event_attr_br_pred.attr.attr,
        &armv8_event_attr_mem_access.attr.attr,
        &armv8_event_attr_l1i_cache.attr.attr,
        &armv8_event_attr_l1d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_cache.attr.attr,
        &armv8_event_attr_l2d_cache_refill.attr.attr,
        &armv8_event_attr_l2d_cache_wb.attr.attr,
        &armv8_event_attr_bus_access.attr.attr,
        &armv8_event_attr_memory_error.attr.attr,
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
        &armv8_event_attr_chain.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
        &armv8_event_attr_br_mis_pred_retired.attr.attr,
        &armv8_event_attr_stall_frontend.attr.attr,
        &armv8_event_attr_stall_backend.attr.attr,
        &armv8_event_attr_l1d_tlb.attr.attr,
        &armv8_event_attr_l1i_tlb.attr.attr,
        &armv8_event_attr_l2i_cache.attr.attr,
        &armv8_event_attr_l2i_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache_allocate.attr.attr,
        &armv8_event_attr_l3d_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache.attr.attr,
        &armv8_event_attr_l3d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_tlb_refill.attr.attr,
        &armv8_event_attr_l21_tlb_refill.attr.attr,
        &armv8_event_attr_l2d_tlb.attr.attr,
        &armv8_event_attr_l21_tlb.attr.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
};

PMU_FORMAT_ATTR(event, "config:0-9");
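
/*
 * "config:0-9" tells userspace that the event number occupies bits 0-9 of
 * perf_event_attr::config, matching the 10-bit evtCount field of
 * PMXEVTYPER (ARMV8_EVTYPE_EVENT below).
 */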

static struct attribute *armv8_pmuv3_format_attrs[] = {
        &format_attr_event.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
        .name = "format",
        .attrs = armv8_pmuv3_format_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
        &armv8_pmuv3_events_attr_group,
        &armv8_pmuv3_format_attr_group,
        NULL,
};
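
/*
 * With these groups attached, the PMU shows up under
 * /sys/bus/event_source/devices/<pmu>/ with "events" and "format"
 * subdirectories. An illustrative invocation (assuming a Cortex-A57
 * PMU registered as "armv8_cortex_a57"; 0x11 is the cycle counter):
 *
 *   perf stat -e armv8_cortex_a57/event=0x11/ -- sleep 1
 *   perf stat -e armv8_cortex_a57/cpu_cycles/ -- sleep 1
 */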

/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0      1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
        (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV8_MAX_COUNTERS      32
#define ARMV8_COUNTER_MASK      (ARMV8_MAX_COUNTERS - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
        (((x) - ARMV8_IDX_COUNTER0) & ARMV8_COUNTER_MASK)
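
/*
 * Example: perf index 1 (ARMV8_IDX_COUNTER0) maps to hardware counter 0,
 * index 2 to counter 1, and so on. The cycle counter (perf index 0) wraps
 * around to 31, which lines up with the dedicated cycle counter bit (C)
 * in PMCNTENSET/CLR, PMINTENSET/CLR and PMOVSCLR.
 */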

/*
 * Per-CPU PMCR: config reg
 */
#define ARMV8_PMCR_E            (1 << 0) /* Enable all counters */
#define ARMV8_PMCR_P            (1 << 1) /* Reset all counters */
#define ARMV8_PMCR_C            (1 << 2) /* Cycle counter reset */
#define ARMV8_PMCR_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X            (1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP           (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV8_PMCR_N_SHIFT      11       /* Number of counters supported */
#define ARMV8_PMCR_N_MASK       0x1f
#define ARMV8_PMCR_MASK         0x3f     /* Mask for writable bits */

/*
 * PMOVSR: counters overflow flag status reg
 */
#define ARMV8_OVSR_MASK         0xffffffff      /* Mask for writable bits */
#define ARMV8_OVERFLOWED_MASK   ARMV8_OVSR_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define ARMV8_EVTYPE_MASK       0xc80003ff      /* Mask for writable bits */
#define ARMV8_EVTYPE_EVENT      0x3ff           /* Mask for EVENT bits */
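
/*
 * The writable-bits mask above is the OR of the filter bits defined below
 * (P = bit 31, U = bit 30, NSH = bit 27) and the 10-bit event number:
 * 0x80000000 | 0x40000000 | 0x08000000 | 0x3ff == 0xc80003ff.
 */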

/*
 * Event filters for PMUv3
 */
#define ARMV8_EXCLUDE_EL1       (1 << 31)
#define ARMV8_EXCLUDE_EL0       (1 << 30)
#define ARMV8_INCLUDE_EL2       (1 << 27)

static inline u32 armv8pmu_pmcr_read(void)
{
        u32 val;
        asm volatile("mrs %0, pmcr_el0" : "=r" (val));
        return val;
}

static inline void armv8pmu_pmcr_write(u32 val)
{
        val &= ARMV8_PMCR_MASK;
        isb();
        asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
        return pmovsr & ARMV8_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
        return idx >= ARMV8_IDX_CYCLE_COUNTER &&
                idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
        return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmselr_el0, %0" :: "r" (counter));
        isb();

        return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;
        u32 value = 0;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

        return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u writing wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("msr pmccntr_el0, %0" :: "r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}

static inline void armv8pmu_write_evtype(int idx, u32 val)
{
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_EVTYPE_MASK;
                asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
        }
}

static inline int armv8pmu_enable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
        asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
        isb();

        return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
        u32 value;

        /* Read */
        asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));

        /* Write to clear flags */
        value &= ARMV8_OVSR_MASK;
        asm volatile("msr pmovsclr_el0, %0" :: "r" (value));

        return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters).
         */
        armv8pmu_write_evtype(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv8pmu_enable_intens(idx);

        /*
         * Enable counter
         */
        armv8pmu_enable_counter(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv8pmu_disable_intens(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
        u32 pmovsr;
        struct perf_sample_data data;
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
        struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmovsr = armv8pmu_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv8pmu_has_overflowed(pmovsr))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                /* Ignore if we don't have an event. */
                if (!event)
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event);
                perf_sample_data_init(&data, 0, hwc->last_period);
                if (!armpmu_event_set_period(event))
                        continue;

                if (perf_event_overflow(event, &data, regs))
                        cpu_pmu->disable(event);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}

static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Enable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Disable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
                                  struct perf_event *event)
{
        int idx;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        unsigned long evtype = hwc->config_base & ARMV8_EVTYPE_EVENT;

        /* Always place a cycle counter into the cycle counter. */
        if (evtype == ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) {
                if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV8_IDX_CYCLE_COUNTER;
        }

        /*
         * For anything other than a cycle counter, try to use one of
         * the event counters.
         */
        for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
                if (!test_and_set_bit(idx, cpuc->used_mask))
                        return idx;
        }

        /* The counters are all in use. */
        return -EAGAIN;
}

/*
 * Add an event filter to a given event. This will only work for PMUv3 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
                                     struct perf_event_attr *attr)
{
        unsigned long config_base = 0;

        if (attr->exclude_idle)
                return -EPERM;
        if (attr->exclude_user)
                config_base |= ARMV8_EXCLUDE_EL0;
        if (attr->exclude_kernel)
                config_base |= ARMV8_EXCLUDE_EL1;
        if (!attr->exclude_hv)
                config_base |= ARMV8_INCLUDE_EL2;

        /*
         * Install the filter into config_base as this is used to
         * construct the event type.
         */
        event->config_base = config_base;

        return 0;
}
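
/*
 * Illustrative encoding (a sketch based on the checks above): an event
 * opened with attr.exclude_kernel = 1 gets ARMV8_EXCLUDE_EL1 in its
 * config_base; armv8pmu_enable_event() later writes that, together with
 * the mapped event number, into PMXEVTYPER_EL0 via armv8pmu_write_evtype().
 */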

static void armv8pmu_reset(void *info)
{
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
        u32 idx, nb_cnt = cpu_pmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
                armv8pmu_disable_counter(idx);
                armv8pmu_disable_intens(idx);
        }

        /* Initialize & Reset PMNC: C and P bits. */
        armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_pmuv3_perf_map,
                                &armv8_pmuv3_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a53_perf_map,
                                &armv8_a53_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a57_perf_map,
                                &armv8_a57_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static void armv8pmu_read_num_pmnc_events(void *info)
{
        int *nb_cnt = info;

        /* Read the nb of CNTx counters supported from PMNC */
        *nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMCR_N_SHIFT) & ARMV8_PMCR_N_MASK;

        /* Add the CPU cycles counter */
        *nb_cnt += 1;
}
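
/*
 * Example (a sketch; the value is implementation-defined): a core whose
 * PMCR_EL0.N field reads 6, as on typical Cortex-A53/A57 configurations,
 * ends up with num_events = 7: six programmable event counters plus the
 * dedicated cycle counter.
 */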

static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
{
        return smp_call_function_any(&arm_pmu->supported_cpus,
                                    armv8pmu_read_num_pmnc_events,
                                    &arm_pmu->num_events, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
        cpu_pmu->handle_irq             = armv8pmu_handle_irq;
        cpu_pmu->enable                 = armv8pmu_enable_event;
        cpu_pmu->disable                = armv8pmu_disable_event;
        cpu_pmu->read_counter           = armv8pmu_read_counter;
        cpu_pmu->write_counter          = armv8pmu_write_counter;
        cpu_pmu->get_event_idx          = armv8pmu_get_event_idx;
        cpu_pmu->start                  = armv8pmu_start;
        cpu_pmu->stop                   = armv8pmu_stop;
        cpu_pmu->reset                  = armv8pmu_reset;
        cpu_pmu->max_period             = (1LLU << 32) - 1;
        cpu_pmu->set_event_filter       = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_pmuv3";
        cpu_pmu->map_event              = armv8_pmuv3_map_event;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a53";
        cpu_pmu->map_event              = armv8_a53_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a57";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
        {.compatible = "arm,armv8-pmuv3",       .data = armv8_pmuv3_init},
        {.compatible = "arm,cortex-a53-pmu",    .data = armv8_a53_pmu_init},
        {.compatible = "arm,cortex-a57-pmu",    .data = armv8_a57_pmu_init},
        {},
};
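
/*
 * A device-tree binding sketch (illustrative only; the interrupt number
 * and trigger type are board-specific) that would match the table above:
 *
 *   pmu {
 *           compatible = "arm,cortex-a57-pmu";
 *           interrupts = <GIC_PPI 7 IRQ_TYPE_LEVEL_HIGH>;
 *   };
 */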

static int armv8_pmu_device_probe(struct platform_device *pdev)
{
        return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
        .driver         = {
                .name   = "armv8-pmu",
                .of_match_table = armv8_pmu_of_device_ids,
        },
        .probe          = armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
        return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);