arch/arm64/kernel/perf_event.c
/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR                        0x00
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL                    0x03
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS                    0x04
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED                  0x10
#define ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES                        0x11
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED                      0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED                      0x08
#define ARMV8_PMUV3_PERFCTR_OP_SPEC                             0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_MEM_READ                            0x06
#define ARMV8_PMUV3_PERFCTR_MEM_WRITE                           0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN                           0x09
#define ARMV8_PMUV3_PERFCTR_EXC_EXECUTED                        0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE                           0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE                            0x0C
#define ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH                       0x0D
#define ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN                      0x0E
#define ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS                0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE                          0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN                               0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED                          0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL                    0x01
#define ARMV8_PMUV3_PERFCTR_ITLB_REFILL                         0x02
#define ARMV8_PMUV3_PERFCTR_DTLB_REFILL                         0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS                          0x13
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS                    0x14
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB                        0x15
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS                     0x16
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL                     0x17
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_WB                         0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS                          0x19
#define ARMV8_PMUV3_PERFCTR_MEM_ERROR                           0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES                          0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE                  0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE                  0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED                 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND                      0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND                       0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB                             0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB                             0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE                           0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL                    0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE                  0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL                    0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE                           0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB                        0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL                      0x2D
#define ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL                      0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB                             0x2F
#define ARMV8_PMUV3_PERFCTR_L2I_TLB                             0x30

/* ARMv8 implementation defined event types. */
#define ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_LD                0x40
#define ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_ST                0x41
#define ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_LD                0x42
#define ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_ST                0x43
#define ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_LD                     0x4C
#define ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_ST                     0x4D
#define ARMV8_IMPDEF_PERFCTR_DTLB_ACCESS_LD                     0x4E
#define ARMV8_IMPDEF_PERFCTR_DTLB_ACCESS_ST                     0x4F

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREFETCH_LINEFILL                     0xC2

/* ARMv8 Cavium ThunderX specific event types. */
#define ARMV8_THUNDER_PERFCTR_L1_DCACHE_MISS_ST                 0xE9
#define ARMV8_THUNDER_PERFCTR_L1_DCACHE_PREF_ACCESS             0xEA
#define ARMV8_THUNDER_PERFCTR_L1_DCACHE_PREF_MISS               0xEB
#define ARMV8_THUNDER_PERFCTR_L1_ICACHE_PREF_ACCESS             0xEC
#define ARMV8_THUNDER_PERFCTR_L1_ICACHE_PREF_MISS               0xED

/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS]     = ARMV8_PMUV3_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* Cavium ThunderX events mapping. */
static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS]     = ARMV8_PMUV3_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
        [PERF_COUNT_HW_STALLED_CYCLES_BACKEND]  = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
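
/*
 * Example: with the tables above, a generic PERF_COUNT_HW_CPU_CYCLES
 * request is resolved by armpmu_map_event() to the architectural event
 * number 0x11 (ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) before it is programmed
 * into the event-type register.
 */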

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                [PERF_COUNT_HW_CACHE_OP_MAX]
                                                [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREFETCH_LINEFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_LD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_LD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_ST,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_ST,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_LD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]  = ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_ST,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                   [PERF_COUNT_HW_CACHE_OP_MAX]
                                                   [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_LD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_LD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1_DCACHE_ACCESS_ST,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_THUNDER_PERFCTR_L1_DCACHE_MISS_ST,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1_DCACHE_PREF_ACCESS,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1_DCACHE_PREF_MISS,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,
        [C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1_ICACHE_PREF_ACCESS,
        [C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1_ICACHE_PREF_MISS,

        [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_DTLB_ACCESS_LD,
        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_LD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_DTLB_ACCESS_ST,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]  = ARMV8_IMPDEF_PERFCTR_DTLB_REFILL_ST,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};
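
/*
 * Example: a generic cache event such as L1-dcache-load-misses selects
 * [C(L1D)][C(OP_READ)][C(RESULT_MISS)] from the active table above:
 * ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL on the architected PMU, or the
 * load-only ARMV8_IMPDEF_PERFCTR_L1_DCACHE_REFILL_LD on Cortex-A57/A72
 * and ThunderX.
 */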

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
        PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
                              "event=" ARMV8_EVENT_ATTR_RESOLVE(config))
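
/*
 * Example expansion: because the config argument is macro-expanded before
 * ARMV8_EVENT_ATTR_RESOLVE() stringifies it,
 * ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) creates a
 * sysfs events/cpu_cycles attribute whose contents read "event=0x11".
 */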

ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_ITLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_DTLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_MEM_READ);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_MEM_WRITE);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_EXECUTED);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEM_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_OP_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l2i_tlb_refill, ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l2i_tlb, ARMV8_PMUV3_PERFCTR_L2I_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_sw_incr.attr.attr,
        &armv8_event_attr_l1i_cache_refill.attr.attr,
        &armv8_event_attr_l1i_tlb_refill.attr.attr,
        &armv8_event_attr_l1d_cache_refill.attr.attr,
        &armv8_event_attr_l1d_cache.attr.attr,
        &armv8_event_attr_l1d_tlb_refill.attr.attr,
        &armv8_event_attr_ld_retired.attr.attr,
        &armv8_event_attr_st_retired.attr.attr,
        &armv8_event_attr_inst_retired.attr.attr,
        &armv8_event_attr_exc_taken.attr.attr,
        &armv8_event_attr_exc_return.attr.attr,
        &armv8_event_attr_cid_write_retired.attr.attr,
        &armv8_event_attr_pc_write_retired.attr.attr,
        &armv8_event_attr_br_immed_retired.attr.attr,
        &armv8_event_attr_br_return_retired.attr.attr,
        &armv8_event_attr_unaligned_ldst_retired.attr.attr,
        &armv8_event_attr_br_mis_pred.attr.attr,
        &armv8_event_attr_cpu_cycles.attr.attr,
        &armv8_event_attr_br_pred.attr.attr,
        &armv8_event_attr_mem_access.attr.attr,
        &armv8_event_attr_l1i_cache.attr.attr,
        &armv8_event_attr_l1d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_cache.attr.attr,
        &armv8_event_attr_l2d_cache_refill.attr.attr,
        &armv8_event_attr_l2d_cache_wb.attr.attr,
        &armv8_event_attr_bus_access.attr.attr,
        &armv8_event_attr_memory_error.attr.attr,
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
        &armv8_event_attr_chain.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
        &armv8_event_attr_br_mis_pred_retired.attr.attr,
        &armv8_event_attr_stall_frontend.attr.attr,
        &armv8_event_attr_stall_backend.attr.attr,
        &armv8_event_attr_l1d_tlb.attr.attr,
        &armv8_event_attr_l1i_tlb.attr.attr,
        &armv8_event_attr_l2i_cache.attr.attr,
        &armv8_event_attr_l2i_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache_allocate.attr.attr,
        &armv8_event_attr_l3d_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache.attr.attr,
        &armv8_event_attr_l3d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_tlb_refill.attr.attr,
        &armv8_event_attr_l2i_tlb_refill.attr.attr,
        &armv8_event_attr_l2d_tlb.attr.attr,
        &armv8_event_attr_l2i_tlb.attr.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
};

PMU_FORMAT_ATTR(event, "config:0-9");

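/*
 * The "format" group tells perf userspace how to encode raw events in
 * perf_event_attr::config, e.g. (sketch; the PMU instance name depends on
 * which of the *_pmu_init() variants probed on the running system):
 *
 *	perf stat -e armv8_pmuv3/event=0x11/ -- sleep 1
 */
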
static struct attribute *armv8_pmuv3_format_attrs[] = {
        &format_attr_event.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
        .name = "format",
        .attrs = armv8_pmuv3_format_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
        &armv8_pmuv3_events_attr_group,
        &armv8_pmuv3_format_attr_group,
        NULL,
};

/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0      1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
        (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV8_MAX_COUNTERS      32
#define ARMV8_COUNTER_MASK      (ARMV8_MAX_COUNTERS - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
        (((x) - ARMV8_IDX_COUNTER0) & ARMV8_COUNTER_MASK)

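/*
 * Example: perf index 0 (ARMV8_IDX_CYCLE_COUNTER) is the dedicated cycle
 * counter, while index 1 (ARMV8_IDX_COUNTER0) maps to hardware event
 * counter 0, i.e. ARMV8_IDX_TO_COUNTER(1) == 0.
 */
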
/*
 * Per-CPU PMCR: config reg
 */
#define ARMV8_PMCR_E            (1 << 0) /* Enable all counters */
#define ARMV8_PMCR_P            (1 << 1) /* Reset all counters */
#define ARMV8_PMCR_C            (1 << 2) /* Cycle counter reset */
#define ARMV8_PMCR_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X            (1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP           (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV8_PMCR_LC           (1 << 6) /* Overflow on 64 bit cycle counter */
#define ARMV8_PMCR_N_SHIFT      11       /* Number of counters supported */
#define ARMV8_PMCR_N_MASK       0x1f
#define ARMV8_PMCR_MASK         0x3f     /* Mask for writable bits */

/*
 * PMOVSR: counters overflow flag status reg
 */
#define ARMV8_OVSR_MASK         0xffffffff      /* Mask for writable bits */
#define ARMV8_OVERFLOWED_MASK   ARMV8_OVSR_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define ARMV8_EVTYPE_MASK       0xc800ffff      /* Mask for writable bits */
#define ARMV8_EVTYPE_EVENT      0xffff          /* Mask for EVENT bits */

/*
 * Event filters for PMUv3
 */
#define ARMV8_EXCLUDE_EL1       (1 << 31)
#define ARMV8_EXCLUDE_EL0       (1 << 30)
#define ARMV8_INCLUDE_EL2       (1 << 27)

static inline u32 armv8pmu_pmcr_read(void)
{
        u32 val;
        asm volatile("mrs %0, pmcr_el0" : "=r" (val));
        return val;
}

static inline void armv8pmu_pmcr_write(u32 val)
{
        val &= ARMV8_PMCR_MASK;
        isb();
        asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
        return pmovsr & ARMV8_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
        return idx >= ARMV8_IDX_CYCLE_COUNTER &&
                idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
        return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmselr_el0, %0" :: "r" (counter));
        isb();

        return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;
        u32 value = 0;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

        return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u writing wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
                /*
                 * Set the upper 32 bits as this is a 64 bit counter but we only
                 * count using the lower 32 bits and we want an interrupt when
                 * it overflows.
                 */
                u64 value64 = 0xffffffff00000000ULL | value;

                asm volatile("msr pmccntr_el0, %0" :: "r" (value64));
        } else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}

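/*
 * Example: because events are counted in 32 bits, arming a counter with
 * (u32)-16 makes it overflow, and hence interrupt, after 16 increments;
 * armpmu_event_set_period() relies on this together with the 32-bit
 * max_period set up in armv8_pmu_init() below.
 */
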
static inline void armv8pmu_write_evtype(int idx, u32 val)
{
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_EVTYPE_MASK;
                asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
        }
}

static inline int armv8pmu_enable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
        asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
        asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
        isb();

        return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
        u32 value;

        /* Read */
        asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));

        /* Write to clear flags */
        value &= ARMV8_OVSR_MASK;
        asm volatile("msr pmovsclr_el0, %0" :: "r" (value));

        return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters).
         */
        armv8pmu_write_evtype(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv8pmu_enable_intens(idx);

        /*
         * Enable counter
         */
        armv8pmu_enable_counter(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv8pmu_disable_intens(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
        u32 pmovsr;
        struct perf_sample_data data;
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
        struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmovsr = armv8pmu_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv8pmu_has_overflowed(pmovsr))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                /* Ignore if we don't have an event. */
                if (!event)
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event);
                perf_sample_data_init(&data, 0, hwc->last_period);
                if (!armpmu_event_set_period(event))
                        continue;

                if (perf_event_overflow(event, &data, regs))
                        cpu_pmu->disable(event);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}

static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Enable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Disable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
                                  struct perf_event *event)
{
        int idx;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        unsigned long evtype = hwc->config_base & ARMV8_EVTYPE_EVENT;

        /* Always place a cycle counter into the cycle counter. */
        if (evtype == ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) {
                if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV8_IDX_CYCLE_COUNTER;
        }

        /*
         * For anything other than a cycle counter, try and use
         * the event counters.
         */
        for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
                if (!test_and_set_bit(idx, cpuc->used_mask))
                        return idx;
        }

        /* The counters are all in use. */
        return -EAGAIN;
}
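
/*
 * Example: a CPU-cycles event always claims ARMV8_IDX_CYCLE_COUNTER (0);
 * any other event takes the first free index from ARMV8_IDX_COUNTER0
 * upwards, and -EAGAIN tells the core that every counter is busy.
 */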

/*
 * Add an event filter to a given event. This will only work for PMUv3 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
                                     struct perf_event_attr *attr)
{
        unsigned long config_base = 0;

        if (attr->exclude_idle)
                return -EPERM;
        if (attr->exclude_user)
                config_base |= ARMV8_EXCLUDE_EL0;
        if (attr->exclude_kernel)
                config_base |= ARMV8_EXCLUDE_EL1;
        if (!attr->exclude_hv)
                config_base |= ARMV8_INCLUDE_EL2;

        /*
         * Install the filter into config_base as this is used to
         * construct the event type.
         */
        event->config_base = config_base;

        return 0;
}
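
/*
 * Example: "perf stat -e cycles:u" sets exclude_kernel and exclude_hv, so
 * config_base becomes ARMV8_EXCLUDE_EL1 (EL0 counting stays enabled); the
 * bits reach PMXEVTYPER_EL0 via hwc->config_base in armv8pmu_enable_event().
 */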

static void armv8pmu_reset(void *info)
{
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
        u32 idx, nb_cnt = cpu_pmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
                armv8pmu_disable_counter(idx);
                armv8pmu_disable_intens(idx);
        }

        /*
         * Initialize & Reset PMNC. Request overflow interrupt for
         * 64 bit cycle counter but cheat in armv8pmu_write_counter().
         */
        armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C | ARMV8_PMCR_LC);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_pmuv3_perf_map,
                                &armv8_pmuv3_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a53_perf_map,
                                &armv8_a53_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a57_perf_map,
                                &armv8_a57_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_thunder_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_thunder_perf_map,
                                &armv8_thunder_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static void armv8pmu_read_num_pmnc_events(void *info)
{
        int *nb_cnt = info;

        /* Read the nb of CNTx counters supported from PMNC */
        *nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMCR_N_SHIFT) & ARMV8_PMCR_N_MASK;

        /* Add the CPU cycles counter */
        *nb_cnt += 1;
}
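
/*
 * Example: on a core whose PMCR_EL0.N field reads 6, there are six event
 * counters plus the fixed cycle counter, so num_events ends up as 7.
 */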

static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
{
        return smp_call_function_any(&arm_pmu->supported_cpus,
                                    armv8pmu_read_num_pmnc_events,
                                    &arm_pmu->num_events, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
        cpu_pmu->handle_irq             = armv8pmu_handle_irq;
        cpu_pmu->enable                 = armv8pmu_enable_event;
        cpu_pmu->disable                = armv8pmu_disable_event;
        cpu_pmu->read_counter           = armv8pmu_read_counter;
        cpu_pmu->write_counter          = armv8pmu_write_counter;
        cpu_pmu->get_event_idx          = armv8pmu_get_event_idx;
        cpu_pmu->start                  = armv8pmu_start;
        cpu_pmu->stop                   = armv8pmu_stop;
        cpu_pmu->reset                  = armv8pmu_reset;
        cpu_pmu->max_period             = (1LLU << 32) - 1;
        cpu_pmu->set_event_filter       = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_pmuv3";
        cpu_pmu->map_event              = armv8_pmuv3_map_event;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a53";
        cpu_pmu->map_event              = armv8_a53_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a57";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cortex_a72";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_cavium_thunder";
        cpu_pmu->map_event              = armv8_thunder_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
        {.compatible = "arm,armv8-pmuv3",       .data = armv8_pmuv3_init},
        {.compatible = "arm,cortex-a53-pmu",    .data = armv8_a53_pmu_init},
        {.compatible = "arm,cortex-a57-pmu",    .data = armv8_a57_pmu_init},
        {.compatible = "arm,cortex-a72-pmu",    .data = armv8_a72_pmu_init},
        {.compatible = "cavium,thunder-pmu",    .data = armv8_thunder_pmu_init},
        {},
};
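
/*
 * Example devicetree node (illustrative only; the interrupt specifier is
 * board specific):
 *
 *	pmu {
 *		compatible = "arm,cortex-a57-pmu";
 *		interrupts = <1 7 0xf04>;
 *	};
 */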

static int armv8_pmu_device_probe(struct platform_device *pdev)
{
        return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
        .driver         = {
                .name   = "armv8-pmu",
                .of_match_table = armv8_pmu_of_device_ids,
        },
        .probe          = armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
        return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);