/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR                        0x00
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL                    0x03
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS                    0x04
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED                  0x10
#define ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES                        0x11
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED                      0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED                      0x08
#define ARMV8_PMUV3_PERFCTR_OP_SPEC                             0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_MEM_READ                            0x06
#define ARMV8_PMUV3_PERFCTR_MEM_WRITE                           0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN                           0x09
#define ARMV8_PMUV3_PERFCTR_EXC_EXECUTED                        0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE                           0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE                            0x0C
#define ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH                       0x0D
#define ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN                      0x0E
#define ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS                0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE                          0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN                               0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED                          0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL                    0x01
#define ARMV8_PMUV3_PERFCTR_ITLB_REFILL                         0x02
#define ARMV8_PMUV3_PERFCTR_DTLB_REFILL                         0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS                          0x13
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS                    0x14
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB                        0x15
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS                     0x16
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL                     0x17
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_WB                         0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS                          0x19
#define ARMV8_PMUV3_PERFCTR_MEM_ERROR                           0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES                          0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE                  0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE                  0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED                 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND                      0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND                       0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB                             0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB                             0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE                           0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL                    0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE                  0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL                    0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE                           0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB                        0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL                      0x2D
#define ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL                      0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB                             0x2F
#define ARMV8_PMUV3_PERFCTR_L21_TLB                             0x30

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREFETCH_LINEFILL                     0xC2

/* ARMv8 Cortex-A57 and Cortex-A72 specific event types. */
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD                   0x40
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST                   0x41
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD                   0x42
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST                   0x43
#define ARMV8_A57_PERFCTR_DTLB_REFILL_LD                        0x4C
#define ARMV8_A57_PERFCTR_DTLB_REFILL_ST                        0x4D

/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS]     = ARMV8_PMUV3_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                [PERF_COUNT_HW_CACHE_OP_MAX]
                                                [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREFETCH_LINEFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_A57_PERFCTR_DTLB_REFILL_LD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]  = ARMV8_A57_PERFCTR_DTLB_REFILL_ST,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
        PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
                              "event=" ARMV8_EVENT_ATTR_RESOLVE(config))

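/*
 * Illustrative expansion: ARMV8_EVENT_ATTR(sw_incr,
 * ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR) defines a sysfs event attribute named
 * "sw_incr" whose contents read "event=0x00". The ARMV8_EVENT_ATTR_RESOLVE()
 * indirection makes the preprocessor expand the event macro to its numeric
 * value before stringifying it.
 */
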
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_ITLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_DTLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_MEM_READ);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_MEM_WRITE);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_EXECUTED);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEM_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_OP_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l21_tlb_refill, ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l21_tlb, ARMV8_PMUV3_PERFCTR_L21_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_sw_incr.attr.attr,
        &armv8_event_attr_l1i_cache_refill.attr.attr,
        &armv8_event_attr_l1i_tlb_refill.attr.attr,
        &armv8_event_attr_l1d_cache_refill.attr.attr,
        &armv8_event_attr_l1d_cache.attr.attr,
        &armv8_event_attr_l1d_tlb_refill.attr.attr,
        &armv8_event_attr_ld_retired.attr.attr,
        &armv8_event_attr_st_retired.attr.attr,
        &armv8_event_attr_inst_retired.attr.attr,
        &armv8_event_attr_exc_taken.attr.attr,
        &armv8_event_attr_exc_return.attr.attr,
        &armv8_event_attr_cid_write_retired.attr.attr,
        &armv8_event_attr_pc_write_retired.attr.attr,
        &armv8_event_attr_br_immed_retired.attr.attr,
        &armv8_event_attr_br_return_retired.attr.attr,
        &armv8_event_attr_unaligned_ldst_retired.attr.attr,
        &armv8_event_attr_br_mis_pred.attr.attr,
        &armv8_event_attr_cpu_cycles.attr.attr,
        &armv8_event_attr_br_pred.attr.attr,
        &armv8_event_attr_mem_access.attr.attr,
        &armv8_event_attr_l1i_cache.attr.attr,
        &armv8_event_attr_l1d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_cache.attr.attr,
        &armv8_event_attr_l2d_cache_refill.attr.attr,
        &armv8_event_attr_l2d_cache_wb.attr.attr,
        &armv8_event_attr_bus_access.attr.attr,
        &armv8_event_attr_memory_error.attr.attr,
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
        &armv8_event_attr_chain.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
        &armv8_event_attr_br_mis_pred_retired.attr.attr,
        &armv8_event_attr_stall_frontend.attr.attr,
        &armv8_event_attr_stall_backend.attr.attr,
        &armv8_event_attr_l1d_tlb.attr.attr,
        &armv8_event_attr_l1i_tlb.attr.attr,
        &armv8_event_attr_l2i_cache.attr.attr,
        &armv8_event_attr_l2i_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache_allocate.attr.attr,
        &armv8_event_attr_l3d_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache.attr.attr,
        &armv8_event_attr_l3d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_tlb_refill.attr.attr,
        &armv8_event_attr_l21_tlb_refill.attr.attr,
        &armv8_event_attr_l2d_tlb.attr.attr,
        &armv8_event_attr_l21_tlb.attr.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
        &format_attr_event.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
        .name = "format",
        .attrs = armv8_pmuv3_format_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
        &armv8_pmuv3_events_attr_group,
        &armv8_pmuv3_format_attr_group,
        NULL,
};

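/*
 * These groups surface under sysfs; for a PMU registered as
 * "armv8_cortex_a53" the paths (illustrative) are:
 *
 *   /sys/bus/event_source/devices/armv8_cortex_a53/events/cpu_cycles
 *   /sys/bus/event_source/devices/armv8_cortex_a53/format/event
 *
 * enabling usage such as:
 *
 *   perf stat -e armv8_cortex_a53/event=0x11/ -- <workload>
 */
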
/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0      1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
        (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV8_MAX_COUNTERS      32
#define ARMV8_COUNTER_MASK      (ARMV8_MAX_COUNTERS - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
        (((x) - ARMV8_IDX_COUNTER0) & ARMV8_COUNTER_MASK)

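/*
 * For example, perf index 1 (ARMV8_IDX_COUNTER0) maps to hardware event
 * counter 0. The cycle counter (perf index 0) never goes through this
 * mapping; it is accessed directly via PMCCNTR_EL0.
 */
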
/*
 * Per-CPU PMCR: config reg
 */
#define ARMV8_PMCR_E            (1 << 0) /* Enable all counters */
#define ARMV8_PMCR_P            (1 << 1) /* Reset all counters */
#define ARMV8_PMCR_C            (1 << 2) /* Cycle counter reset */
#define ARMV8_PMCR_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X            (1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP           (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV8_PMCR_N_SHIFT      11       /* Number of counters supported */
#define ARMV8_PMCR_N_MASK       0x1f
#define ARMV8_PMCR_MASK         0x3f     /* Mask for writable bits */

/*
 * PMOVSR: counters overflow flag status reg
 */
#define ARMV8_OVSR_MASK         0xffffffff      /* Mask for writable bits */
#define ARMV8_OVERFLOWED_MASK   ARMV8_OVSR_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define ARMV8_EVTYPE_MASK       0xc80003ff      /* Mask for writable bits */
#define ARMV8_EVTYPE_EVENT      0x3ff           /* Mask for EVENT bits */

/*
 * Event filters for PMUv3
 */
#define ARMV8_EXCLUDE_EL1       (1 << 31)
#define ARMV8_EXCLUDE_EL0       (1 << 30)
#define ARMV8_INCLUDE_EL2       (1 << 27)

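/*
 * Worked example: a perf_event_attr with exclude_user = 1 and exclude_hv = 1
 * produces a config_base of ARMV8_EXCLUDE_EL0, i.e. the counter ticks in EL1
 * only (see armv8pmu_set_event_filter() below).
 */
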
static inline u32 armv8pmu_pmcr_read(void)
{
        u32 val;
        asm volatile("mrs %0, pmcr_el0" : "=r" (val));
        return val;
}

static inline void armv8pmu_pmcr_write(u32 val)
{
        val &= ARMV8_PMCR_MASK;
        isb();
        asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
        return pmovsr & ARMV8_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
        return idx >= ARMV8_IDX_CYCLE_COUNTER &&
                idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
        return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmselr_el0, %0" :: "r" (counter));
        isb();

        return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;
        u32 value = 0;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

        return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u writing wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("msr pmccntr_el0, %0" :: "r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}
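
/*
 * Note: the event counters are 32 bits wide, so the core layer arms each
 * counter by writing (u32)(-left) via armpmu_event_set_period(); the counter
 * then overflows, raising the PMU interrupt, after "left" further events.
 */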

static inline void armv8pmu_write_evtype(int idx, u32 val)
{
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_EVTYPE_MASK;
                asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
        }
}

static inline int armv8pmu_enable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
        asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
        isb();

        return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
        u32 value;

        /* Read */
        asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));

        /* Write to clear flags */
        value &= ARMV8_OVSR_MASK;
        asm volatile("msr pmovsclr_el0, %0" :: "r" (value));

        return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters).
         */
        armv8pmu_write_evtype(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv8pmu_enable_intens(idx);

        /*
         * Enable counter
         */
        armv8pmu_enable_counter(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv8pmu_disable_intens(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
        u32 pmovsr;
        struct perf_sample_data data;
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
        struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmovsr = armv8pmu_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv8pmu_has_overflowed(pmovsr))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                /* Ignore if we don't have an event. */
                if (!event)
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event);
                perf_sample_data_init(&data, 0, hwc->last_period);
                if (!armpmu_event_set_period(event))
                        continue;

                if (perf_event_overflow(event, &data, regs))
                        cpu_pmu->disable(event);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}

static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Enable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Disable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
                                  struct perf_event *event)
{
        int idx;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        unsigned long evtype = hwc->config_base & ARMV8_EVTYPE_EVENT;

        /* Always place a cycle counter event into the cycle counter. */
        if (evtype == ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) {
                if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV8_IDX_CYCLE_COUNTER;
        }

        /*
         * For anything other than a cycle counter, try to use
         * the event counters
         */
        for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
                if (!test_and_set_bit(idx, cpuc->used_mask))
                        return idx;
        }

        /* The counters are all in use. */
        return -EAGAIN;
}

/*
 * Add an event filter to a given event. This will only work for PMUv3 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
                                     struct perf_event_attr *attr)
{
        unsigned long config_base = 0;

        if (attr->exclude_idle)
                return -EPERM;
        if (attr->exclude_user)
                config_base |= ARMV8_EXCLUDE_EL0;
        if (attr->exclude_kernel)
                config_base |= ARMV8_EXCLUDE_EL1;
        if (!attr->exclude_hv)
                config_base |= ARMV8_INCLUDE_EL2;

        /*
         * Install the filter into config_base as this is used to
         * construct the event type.
         */
        event->config_base = config_base;

        return 0;
}

static void armv8pmu_reset(void *info)
{
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
        u32 idx, nb_cnt = cpu_pmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
                armv8pmu_disable_counter(idx);
                armv8pmu_disable_intens(idx);
        }

        /* Initialize & Reset PMNC: C and P bits. */
        armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_pmuv3_perf_map,
                                &armv8_pmuv3_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a53_perf_map,
                                &armv8_a53_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a57_perf_map,
                                &armv8_a57_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static void armv8pmu_read_num_pmnc_events(void *info)
{
        int *nb_cnt = info;

        /* Read the number of CNTx counters supported from PMNC */
        *nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMCR_N_SHIFT) & ARMV8_PMCR_N_MASK;

        /* Add the CPU cycles counter */
        *nb_cnt += 1;
}

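/*
 * For example, a core whose PMCR_EL0.N field reads 6 has six event counters,
 * so num_events ends up as 7 once the fixed cycle counter is included.
 */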
static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
{
        return smp_call_function_any(&arm_pmu->supported_cpus,
                                    armv8pmu_read_num_pmnc_events,
                                    &arm_pmu->num_events, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
        cpu_pmu->handle_irq             = armv8pmu_handle_irq;
        cpu_pmu->enable                 = armv8pmu_enable_event;
        cpu_pmu->disable                = armv8pmu_disable_event;
        cpu_pmu->read_counter           = armv8pmu_read_counter;
        cpu_pmu->write_counter          = armv8pmu_write_counter;
        cpu_pmu->get_event_idx          = armv8pmu_get_event_idx;
        cpu_pmu->start                  = armv8pmu_start;
        cpu_pmu->stop                   = armv8pmu_stop;
        cpu_pmu->reset                  = armv8pmu_reset;
        cpu_pmu->max_period             = (1LLU << 32) - 1;
        cpu_pmu->set_event_filter       = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_pmuv3";
        cpu_pmu->map_event              = armv8_pmuv3_map_event;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a53";
        cpu_pmu->map_event              = armv8_a53_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a57";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a72";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
        {.compatible = "arm,armv8-pmuv3",       .data = armv8_pmuv3_init},
        {.compatible = "arm,cortex-a53-pmu",    .data = armv8_a53_pmu_init},
        {.compatible = "arm,cortex-a57-pmu",    .data = armv8_a57_pmu_init},
        {.compatible = "arm,cortex-a72-pmu",    .data = armv8_a72_pmu_init},
        {},
};

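/*
 * Illustrative devicetree node this driver binds against; the interrupt
 * specifier shown is board-dependent:
 *
 *      pmu {
 *              compatible = "arm,cortex-a53-pmu";
 *              interrupts = <GIC_PPI 7 IRQ_TYPE_LEVEL_LOW>;
 *      };
 */
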
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
        return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
        .driver         = {
                .name   = "armv8-pmu",
                .of_match_table = armv8_pmu_of_device_ids,
        },
        .probe          = armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
        return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);