/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR		0x00
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL		0x03
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS		0x04
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED		0x10
#define ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES		0x11
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED		0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED		0x08
#define ARMV8_PMUV3_PERFCTR_OP_SPEC			0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_MEM_READ			0x06
#define ARMV8_PMUV3_PERFCTR_MEM_WRITE			0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN			0x09
#define ARMV8_PMUV3_PERFCTR_EXC_EXECUTED		0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE			0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE			0x0C
#define ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH		0x0D
#define ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN		0x0E
#define ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS	0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE			0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN			0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED			0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL		0x01
#define ARMV8_PMUV3_PERFCTR_ITLB_REFILL			0x02
#define ARMV8_PMUV3_PERFCTR_DTLB_REFILL			0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS			0x13
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS		0x14
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB		0x15
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS		0x16
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL		0x17
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_WB			0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS			0x19
#define ARMV8_PMUV3_PERFCTR_MEM_ERROR			0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES			0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE		0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE		0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED		0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND		0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND		0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB			0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB			0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE			0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL		0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE		0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL		0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE			0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB		0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL		0x2D
#define ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL		0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB			0x2F
#define ARMV8_PMUV3_PERFCTR_L21_TLB			0x30

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREFETCH_LINEFILL		0xC2

/* ARMv8 Cortex-A57 specific event types. */
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD		0x40
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST		0x41
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD		0x42
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST		0x43
#define ARMV8_A57_PERFCTR_DTLB_REFILL_LD		0x4c
#define ARMV8_A57_PERFCTR_DTLB_REFILL_ST		0x4d

/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV8_PMUV3_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

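/* ARM Cortex-A57 HW events mapping. */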
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						[PERF_COUNT_HW_CACHE_OP_MAX]
						[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREFETCH_LINEFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_A57_PERFCTR_DTLB_REFILL_LD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_A57_PERFCTR_DTLB_REFILL_ST,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
			      "event=" ARMV8_EVENT_ATTR_RESOLVE(config))

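/*
 * The extra ARMV8_EVENT_ATTR_RESOLVE() level stringizes the event number
 * only after macro expansion, so e.g. ARMV8_EVENT_ATTR(sw_incr,
 * ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR) creates armv8_event_attr_sw_incr, a
 * sysfs "events/sw_incr" attribute whose contents read "event=0x00".
 */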
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_ITLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_DTLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_MEM_READ);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_MEM_WRITE);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_EXECUTED);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEM_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_OP_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l21_tlb_refill, ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l21_tlb, ARMV8_PMUV3_PERFCTR_L21_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
	&armv8_event_attr_sw_incr.attr.attr,
	&armv8_event_attr_l1i_cache_refill.attr.attr,
	&armv8_event_attr_l1i_tlb_refill.attr.attr,
	&armv8_event_attr_l1d_cache_refill.attr.attr,
	&armv8_event_attr_l1d_cache.attr.attr,
	&armv8_event_attr_l1d_tlb_refill.attr.attr,
	&armv8_event_attr_ld_retired.attr.attr,
	&armv8_event_attr_st_retired.attr.attr,
	&armv8_event_attr_inst_retired.attr.attr,
	&armv8_event_attr_exc_taken.attr.attr,
	&armv8_event_attr_exc_return.attr.attr,
	&armv8_event_attr_cid_write_retired.attr.attr,
	&armv8_event_attr_pc_write_retired.attr.attr,
	&armv8_event_attr_br_immed_retired.attr.attr,
	&armv8_event_attr_br_return_retired.attr.attr,
	&armv8_event_attr_unaligned_ldst_retired.attr.attr,
	&armv8_event_attr_br_mis_pred.attr.attr,
	&armv8_event_attr_cpu_cycles.attr.attr,
	&armv8_event_attr_br_pred.attr.attr,
	&armv8_event_attr_mem_access.attr.attr,
	&armv8_event_attr_l1i_cache.attr.attr,
	&armv8_event_attr_l1d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_cache.attr.attr,
	&armv8_event_attr_l2d_cache_refill.attr.attr,
	&armv8_event_attr_l2d_cache_wb.attr.attr,
	&armv8_event_attr_bus_access.attr.attr,
	&armv8_event_attr_memory_error.attr.attr,
	&armv8_event_attr_inst_spec.attr.attr,
	&armv8_event_attr_ttbr_write_retired.attr.attr,
	&armv8_event_attr_bus_cycles.attr.attr,
	&armv8_event_attr_chain.attr.attr,
	&armv8_event_attr_l1d_cache_allocate.attr.attr,
	&armv8_event_attr_l2d_cache_allocate.attr.attr,
	&armv8_event_attr_br_retired.attr.attr,
	&armv8_event_attr_br_mis_pred_retired.attr.attr,
	&armv8_event_attr_stall_frontend.attr.attr,
	&armv8_event_attr_stall_backend.attr.attr,
	&armv8_event_attr_l1d_tlb.attr.attr,
	&armv8_event_attr_l1i_tlb.attr.attr,
	&armv8_event_attr_l2i_cache.attr.attr,
	&armv8_event_attr_l2i_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache_allocate.attr.attr,
	&armv8_event_attr_l3d_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache.attr.attr,
	&armv8_event_attr_l3d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_tlb_refill.attr.attr,
	&armv8_event_attr_l21_tlb_refill.attr.attr,
	&armv8_event_attr_l2d_tlb.attr.attr,
	&armv8_event_attr_l21_tlb.attr.attr,
	NULL
};

static struct attribute_group armv8_pmuv3_events_attr_group = {
	.name = "events",
	.attrs = armv8_pmuv3_event_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
	&armv8_pmuv3_events_attr_group,
	NULL
};
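
/*
 * The named events above are exported under
 * /sys/bus/event_source/devices/<pmu>/events/, so (assuming the perf tool
 * is available and one of the PMUs below is registered) an event can be
 * requested by name, e.g.:
 *
 *	perf stat -e armv8_cortex_a57/stall_backend/ -- <workload>
 */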

/*
 * Perf Events' indices
 */
#define	ARMV8_IDX_CYCLE_COUNTER	0
#define	ARMV8_IDX_COUNTER0	1
#define	ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define	ARMV8_MAX_COUNTERS	32
#define	ARMV8_COUNTER_MASK	(ARMV8_MAX_COUNTERS - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define	ARMV8_IDX_TO_COUNTER(x)	\
	(((x) - ARMV8_IDX_COUNTER0) & ARMV8_COUNTER_MASK)
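/*
 * Perf index 0 (ARMV8_IDX_CYCLE_COUNTER) is reserved for the dedicated
 * cycle counter, so ARMV8_IDX_TO_COUNTER() shifts the remaining indices
 * down by one to obtain the hardware event counter number, e.g.
 * ARMV8_IDX_COUNTER0 (1) selects event counter 0.
 */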

/*
 * Per-CPU PMCR: config reg
 */
#define ARMV8_PMCR_E		(1 << 0) /* Enable all counters */
#define ARMV8_PMCR_P		(1 << 1) /* Reset all counters */
#define ARMV8_PMCR_C		(1 << 2) /* Cycle counter reset */
#define ARMV8_PMCR_D		(1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X		(1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP		(1 << 5) /* Disable CCNT if non-invasive debug */
#define	ARMV8_PMCR_N_SHIFT	11	 /* Number of counters supported */
#define	ARMV8_PMCR_N_MASK	0x1f
#define	ARMV8_PMCR_MASK		0x3f	 /* Mask for writable bits */

/*
 * PMOVSR: counters overflow flag status reg
 */
#define	ARMV8_OVSR_MASK		0xffffffff	/* Mask for writable bits */
#define	ARMV8_OVERFLOWED_MASK	ARMV8_OVSR_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define	ARMV8_EVTYPE_MASK	0xc80003ff	/* Mask for writable bits */
#define	ARMV8_EVTYPE_EVENT	0x3ff		/* Mask for EVENT bits */
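/*
 * ARMV8_EVTYPE_MASK (0xc80003ff) keeps the 10-bit event number together
 * with the exception-level filter bits defined below (bits 31, 30 and 27
 * of PMXEVTYPER_EL0); everything else is treated as read-only.
 */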

/*
 * Event filters for PMUv3
 */
#define	ARMV8_EXCLUDE_EL1	(1 << 31)
#define	ARMV8_EXCLUDE_EL0	(1 << 30)
#define	ARMV8_INCLUDE_EL2	(1 << 27)

static inline u32 armv8pmu_pmcr_read(void)
{
	u32 val;
	asm volatile("mrs %0, pmcr_el0" : "=r" (val));
	return val;
}

static inline void armv8pmu_pmcr_write(u32 val)
{
	val &= ARMV8_PMCR_MASK;
	isb();
	asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
	return pmovsr & ARMV8_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
	return idx >= ARMV8_IDX_CYCLE_COUNTER &&
		idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

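/*
 * PMSELR_EL0 chooses which event counter the indirect PMXEVCNTR_EL0 and
 * PMXEVTYPER_EL0 accessors refer to; the ISB makes the new selection
 * visible before the following counter access.
 */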
static inline int armv8pmu_select_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmselr_el0, %0" :: "r" (counter));
	isb();

	return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
	else if (armv8pmu_select_counter(idx) == idx)
		asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

	return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		asm volatile("msr pmccntr_el0, %0" :: "r" (value));
	else if (armv8pmu_select_counter(idx) == idx)
		asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}

static inline void armv8pmu_write_evtype(int idx, u32 val)
{
	if (armv8pmu_select_counter(idx) == idx) {
		val &= ARMV8_EVTYPE_MASK;
		asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
	}
}

static inline int armv8pmu_enable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
	return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
	return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
	return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
	isb();

	return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
	u32 value;

	/* Read */
	asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));

	/* Write to clear flags */
	value &= ARMV8_OVSR_MASK;
	asm volatile("msr pmovsclr_el0, %0" :: "r" (value));

	return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 */
	armv8pmu_write_evtype(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv8pmu_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv8pmu_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv8pmu_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmovsr;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmovsr = armv8pmu_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv8pmu_has_overflowed(pmovsr))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}

static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV8_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) {
		if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV8_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}

/*
 * Add an event filter to a given event. This will only work for PMUv2 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;
	if (attr->exclude_user)
		config_base |= ARMV8_EXCLUDE_EL0;
	if (attr->exclude_kernel)
		config_base |= ARMV8_EXCLUDE_EL1;
	if (!attr->exclude_hv)
		config_base |= ARMV8_INCLUDE_EL2;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
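
/*
 * For example, a user-space-only request such as "perf stat -e cycles:u"
 * sets attr->exclude_kernel and attr->exclude_hv, so the filter above
 * becomes ARMV8_EXCLUDE_EL1 with EL2 counting left disabled.
 */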

static void armv8pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv8pmu_disable_counter(idx);
		armv8pmu_disable_intens(idx);
	}

	/* Initialize & Reset PMNC: C and P bits. */
	armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_pmuv3_perf_map,
				&armv8_pmuv3_perf_cache_map,
				ARMV8_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a53_perf_map,
				&armv8_a53_perf_cache_map,
				ARMV8_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a57_perf_map,
				&armv8_a57_perf_cache_map,
				ARMV8_EVTYPE_EVENT);
}

static void armv8pmu_read_num_pmnc_events(void *info)
{
	int *nb_cnt = info;

	/* Read the nb of CNTx counters supported from PMNC */
	*nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMCR_N_SHIFT) & ARMV8_PMCR_N_MASK;

	/* Add the CPU cycles counter */
	*nb_cnt += 1;
}

static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
{
	return smp_call_function_any(&arm_pmu->supported_cpus,
				     armv8pmu_read_num_pmnc_events,
				     &arm_pmu->num_events, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
	cpu_pmu->handle_irq		= armv8pmu_handle_irq,
	cpu_pmu->enable			= armv8pmu_enable_event,
	cpu_pmu->disable		= armv8pmu_disable_event,
	cpu_pmu->read_counter		= armv8pmu_read_counter,
	cpu_pmu->write_counter		= armv8pmu_write_counter,
	cpu_pmu->get_event_idx		= armv8pmu_get_event_idx,
	cpu_pmu->start			= armv8pmu_start,
	cpu_pmu->stop			= armv8pmu_stop,
	cpu_pmu->reset			= armv8pmu_reset,
	cpu_pmu->max_period		= (1LLU << 32) - 1,
	cpu_pmu->set_event_filter	= armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_pmuv3";
	cpu_pmu->map_event		= armv8_pmuv3_map_event;
	return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cortex_a53";
	cpu_pmu->map_event		= armv8_a53_map_event;
	cpu_pmu->pmu.attr_groups	= armv8_pmuv3_attr_groups;
	return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cortex_a57";
	cpu_pmu->map_event		= armv8_a57_map_event;
	cpu_pmu->pmu.attr_groups	= armv8_pmuv3_attr_groups;
	return armv8pmu_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
	{.compatible = "arm,armv8-pmuv3",	.data = armv8_pmuv3_init},
	{.compatible = "arm,cortex-a53-pmu",	.data = armv8_a53_pmu_init},
	{.compatible = "arm,cortex-a57-pmu",	.data = armv8_a57_pmu_init},
	{},
};

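/*
 * Example (hypothetical) devicetree node that the table above would match;
 * the exact interrupt specifier is platform dependent:
 *
 *	pmu {
 *		compatible = "arm,cortex-a57-pmu";
 *		interrupts = <1 7 4>;
 *	};
 */
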
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
	.driver		= {
		.name	= "armv8-pmu",
		.of_match_table = armv8_pmu_of_device_ids,
	},
	.probe		= armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
	return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);