/*
 * arch/arm64/include/asm/sysreg.h
 * (mirror_ubuntu-artful-kernel.git; commit subject:
 *  "arm64: cpufeature: Track user visible fields")
 */
1 /*
2 * Macros for accessing system registers with older binutils.
3 *
4 * Copyright (C) 2014 ARM Ltd.
5 * Author: Catalin Marinas <catalin.marinas@arm.com>
6 *
7 * This program is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 */
19
20 #ifndef __ASM_SYSREG_H
21 #define __ASM_SYSREG_H
22
23 #include <linux/stringify.h>
24
/*
 * System register encoding, per the ARMv8 ARM (Ref: ARMv8 ARM, Section:
 * "System instruction class encoding overview", C5.2, version:
 * ARM DDI 0487A.f):
 *	[20-19]	Op0
 *	[18-16]	Op1
 *	[15-12]	CRn
 *	[11-8]	CRm
 *	[7-5]	Op2
 */
#define Op0_shift	19
#define Op0_mask	0x3
#define Op1_shift	16
#define Op1_mask	0x7
#define CRn_shift	12
#define CRn_mask	0xf
#define CRm_shift	8
#define CRm_mask	0xf
#define Op2_shift	5
#define Op2_mask	0x7

/* Pack an Op0:Op1:CRn:CRm:Op2 tuple into a single register encoding. */
#define sys_reg(op0, op1, crn, crm, op2)	\
	(((op0) << Op0_shift) |			\
	 ((op1) << Op1_shift) |			\
	 ((crn) << CRn_shift) |			\
	 ((crm) << CRm_shift) |			\
	 ((op2) << Op2_shift))

/* Field extractors: the inverse of sys_reg() above. */
#define sys_reg_Op0(id)	(((id) >> Op0_shift) & Op0_mask)
#define sys_reg_Op1(id)	(((id) >> Op1_shift) & Op1_mask)
#define sys_reg_CRn(id)	(((id) >> CRn_shift) & CRn_mask)
#define sys_reg_CRm(id)	(((id) >> CRm_shift) & CRm_mask)
#define sys_reg_Op2(id)	(((id) >> Op2_shift) & Op2_mask)
56
/*
 * __emit_inst(x): emit a hand-encoded instruction word. With a working
 * assembler we use ".inst" directly; with CONFIG_BROKEN_GAS_INST we fall
 * back to ".long", byte-swapping the word first on big-endian builds so
 * it still lands in instruction (little-endian) byte order.
 */
#ifdef CONFIG_BROKEN_GAS_INST

#ifdef CONFIG_CPU_BIG_ENDIAN
#define __INSTR_BSWAP(x)	((((x) << 24) & 0xff000000) | \
				 (((x) << 8) & 0x00ff0000) | \
				 (((x) >> 8) & 0x0000ff00) | \
				 (((x) >> 24) & 0x000000ff))
#else	/* !CONFIG_CPU_BIG_ENDIAN */
#define __INSTR_BSWAP(x)	(x)
#endif	/* CONFIG_CPU_BIG_ENDIAN */

#ifdef __ASSEMBLY__
#define __emit_inst(x)		.long __INSTR_BSWAP(x)
#else	/* !__ASSEMBLY__ */
#define __emit_inst(x)		".long " __stringify(__INSTR_BSWAP(x)) "\n\t"
#endif	/* __ASSEMBLY__ */

#else	/* !CONFIG_BROKEN_GAS_INST */

#ifdef __ASSEMBLY__
#define __emit_inst(x)		.inst (x)
#else	/* !__ASSEMBLY__ */
#define __emit_inst(x)		".inst " __stringify((x)) "\n\t"
#endif	/* __ASSEMBLY__ */

#endif	/* CONFIG_BROKEN_GAS_INST */
83
/* Identification registers (op0=3, op1=0, CRn=0). */
#define SYS_MIDR_EL1			sys_reg(3, 0, 0, 0, 0)
#define SYS_MPIDR_EL1			sys_reg(3, 0, 0, 0, 5)
#define SYS_REVIDR_EL1			sys_reg(3, 0, 0, 0, 6)

/* AArch32 processor/debug/memory-model feature registers. */
#define SYS_ID_PFR0_EL1			sys_reg(3, 0, 0, 1, 0)
#define SYS_ID_PFR1_EL1			sys_reg(3, 0, 0, 1, 1)
#define SYS_ID_DFR0_EL1			sys_reg(3, 0, 0, 1, 2)
#define SYS_ID_MMFR0_EL1		sys_reg(3, 0, 0, 1, 4)
#define SYS_ID_MMFR1_EL1		sys_reg(3, 0, 0, 1, 5)
#define SYS_ID_MMFR2_EL1		sys_reg(3, 0, 0, 1, 6)
#define SYS_ID_MMFR3_EL1		sys_reg(3, 0, 0, 1, 7)

/* AArch32 instruction set attribute registers. */
#define SYS_ID_ISAR0_EL1		sys_reg(3, 0, 0, 2, 0)
#define SYS_ID_ISAR1_EL1		sys_reg(3, 0, 0, 2, 1)
#define SYS_ID_ISAR2_EL1		sys_reg(3, 0, 0, 2, 2)
#define SYS_ID_ISAR3_EL1		sys_reg(3, 0, 0, 2, 3)
#define SYS_ID_ISAR4_EL1		sys_reg(3, 0, 0, 2, 4)
#define SYS_ID_ISAR5_EL1		sys_reg(3, 0, 0, 2, 5)
#define SYS_ID_MMFR4_EL1		sys_reg(3, 0, 0, 2, 6)

/* AArch32 media/VFP feature registers. */
#define SYS_MVFR0_EL1			sys_reg(3, 0, 0, 3, 0)
#define SYS_MVFR1_EL1			sys_reg(3, 0, 0, 3, 1)
#define SYS_MVFR2_EL1			sys_reg(3, 0, 0, 3, 2)

/* AArch64 feature registers. */
#define SYS_ID_AA64PFR0_EL1		sys_reg(3, 0, 0, 4, 0)
#define SYS_ID_AA64PFR1_EL1		sys_reg(3, 0, 0, 4, 1)

#define SYS_ID_AA64DFR0_EL1		sys_reg(3, 0, 0, 5, 0)
#define SYS_ID_AA64DFR1_EL1		sys_reg(3, 0, 0, 5, 1)

#define SYS_ID_AA64ISAR0_EL1		sys_reg(3, 0, 0, 6, 0)
#define SYS_ID_AA64ISAR1_EL1		sys_reg(3, 0, 0, 6, 1)

#define SYS_ID_AA64MMFR0_EL1		sys_reg(3, 0, 0, 7, 0)
#define SYS_ID_AA64MMFR1_EL1		sys_reg(3, 0, 0, 7, 1)
#define SYS_ID_AA64MMFR2_EL1		sys_reg(3, 0, 0, 7, 2)

/* EL0-accessible registers (op1=3). */
#define SYS_CNTFRQ_EL0			sys_reg(3, 3, 14, 0, 0)
#define SYS_CTR_EL0			sys_reg(3, 3, 0, 0, 1)
#define SYS_DCZID_EL0			sys_reg(3, 3, 0, 0, 7)
124
/* MSR (immediate) encodings for the PAN and UAO PSTATE fields (op0=0). */
#define REG_PSTATE_PAN_IMM		sys_reg(0, 0, 4, 0, 4)
#define REG_PSTATE_UAO_IMM		sys_reg(0, 0, 4, 0, 3)

/*
 * Emit an MSR (immediate) setting the field to !!x: base opcode | field
 * selector | one-bit immediate in CRm (bits [11:8]) | Rt = 0b11111.
 * The argument is fully parenthesised so that expressions such as
 * SET_PSTATE_PAN(a & b) normalise the whole expression, not just "a".
 */
#define SET_PSTATE_PAN(x)		__emit_inst(0xd5000000 | REG_PSTATE_PAN_IMM |	\
						    ((!!(x)) << 8) | 0x1f)
#define SET_PSTATE_UAO(x)		__emit_inst(0xd5000000 | REG_PSTATE_UAO_IMM |	\
						    ((!!(x)) << 8) | 0x1f)
132
/* SCTLR bits common to SCTLR_EL1 and SCTLR_EL2. */
#define SCTLR_ELx_EE	(1 << 25)
#define SCTLR_ELx_I	(1 << 12)
#define SCTLR_ELx_SA	(1 << 3)
#define SCTLR_ELx_C	(1 << 2)
#define SCTLR_ELx_A	(1 << 1)
#define SCTLR_ELx_M	1

/* Default bit set used when initialising SCTLR_ELx. */
#define SCTLR_ELx_FLAGS	(SCTLR_ELx_M | SCTLR_ELx_A | SCTLR_ELx_C | \
			 SCTLR_ELx_SA | SCTLR_ELx_I)

/* Bits that only exist in SCTLR_EL1. */
#define SCTLR_EL1_UCI		(1 << 26)
#define SCTLR_EL1_SPAN		(1 << 23)
#define SCTLR_EL1_UCT		(1 << 15)
#define SCTLR_EL1_SED		(1 << 8)
#define SCTLR_EL1_CP15BEN	(1 << 5)
150
/* id_aa64isar0: field offsets within ID_AA64ISAR0_EL1. */
#define ID_AA64ISAR0_RDM_SHIFT		28
#define ID_AA64ISAR0_ATOMICS_SHIFT	20
#define ID_AA64ISAR0_CRC32_SHIFT	16
#define ID_AA64ISAR0_SHA2_SHIFT		12
#define ID_AA64ISAR0_SHA1_SHIFT		8
#define ID_AA64ISAR0_AES_SHIFT		4

/* id_aa64pfr0: field offsets within ID_AA64PFR0_EL1. */
#define ID_AA64PFR0_GIC_SHIFT		24
#define ID_AA64PFR0_ASIMD_SHIFT		20
#define ID_AA64PFR0_FP_SHIFT		16
#define ID_AA64PFR0_EL3_SHIFT		12
#define ID_AA64PFR0_EL2_SHIFT		8
#define ID_AA64PFR0_EL1_SHIFT		4
#define ID_AA64PFR0_EL0_SHIFT		0

/* id_aa64pfr0: well-known field values. */
#define ID_AA64PFR0_FP_NI		0xf
#define ID_AA64PFR0_FP_SUPPORTED	0x0
#define ID_AA64PFR0_ASIMD_NI		0xf
#define ID_AA64PFR0_ASIMD_SUPPORTED	0x0
#define ID_AA64PFR0_EL1_64BIT_ONLY	0x1
#define ID_AA64PFR0_EL0_64BIT_ONLY	0x1
#define ID_AA64PFR0_EL0_32BIT_64BIT	0x2

/* id_aa64mmfr0: field offsets within ID_AA64MMFR0_EL1. */
#define ID_AA64MMFR0_TGRAN4_SHIFT	28
#define ID_AA64MMFR0_TGRAN64_SHIFT	24
#define ID_AA64MMFR0_TGRAN16_SHIFT	20
#define ID_AA64MMFR0_BIGENDEL0_SHIFT	16
#define ID_AA64MMFR0_SNSMEM_SHIFT	12
#define ID_AA64MMFR0_BIGENDEL_SHIFT	8
#define ID_AA64MMFR0_ASID_SHIFT		4
#define ID_AA64MMFR0_PARANGE_SHIFT	0

/*
 * id_aa64mmfr0: granule support values. Note the 16K field uses the
 * opposite polarity from the 4K/64K fields (0 = not implemented).
 */
#define ID_AA64MMFR0_TGRAN4_NI		0xf
#define ID_AA64MMFR0_TGRAN4_SUPPORTED	0x0
#define ID_AA64MMFR0_TGRAN64_NI		0xf
#define ID_AA64MMFR0_TGRAN64_SUPPORTED	0x0
#define ID_AA64MMFR0_TGRAN16_NI		0x0
#define ID_AA64MMFR0_TGRAN16_SUPPORTED	0x1

/* id_aa64mmfr1: field offsets within ID_AA64MMFR1_EL1. */
#define ID_AA64MMFR1_PAN_SHIFT		20
#define ID_AA64MMFR1_LOR_SHIFT		16
#define ID_AA64MMFR1_HPD_SHIFT		12
#define ID_AA64MMFR1_VHE_SHIFT		8
#define ID_AA64MMFR1_VMIDBITS_SHIFT	4
#define ID_AA64MMFR1_HADBS_SHIFT	0

#define ID_AA64MMFR1_VMIDBITS_8		0
#define ID_AA64MMFR1_VMIDBITS_16	2

/* id_aa64mmfr2: field offsets within ID_AA64MMFR2_EL1. */
#define ID_AA64MMFR2_LVA_SHIFT		16
#define ID_AA64MMFR2_IESB_SHIFT		12
#define ID_AA64MMFR2_LSM_SHIFT		8
#define ID_AA64MMFR2_UAO_SHIFT		4
#define ID_AA64MMFR2_CNP_SHIFT		0

/* id_aa64dfr0: field offsets within ID_AA64DFR0_EL1. */
#define ID_AA64DFR0_PMSVER_SHIFT	32
#define ID_AA64DFR0_CTX_CMPS_SHIFT	28
#define ID_AA64DFR0_WRPS_SHIFT		20
#define ID_AA64DFR0_BRPS_SHIFT		12
#define ID_AA64DFR0_PMUVER_SHIFT	8
#define ID_AA64DFR0_TRACEVER_SHIFT	4
#define ID_AA64DFR0_DEBUGVER_SHIFT	0

/* id_isar5 (AArch32): field offsets within ID_ISAR5_EL1. */
#define ID_ISAR5_RDM_SHIFT		24
#define ID_ISAR5_CRC32_SHIFT		16
#define ID_ISAR5_SHA2_SHIFT		12
#define ID_ISAR5_SHA1_SHIFT		8
#define ID_ISAR5_AES_SHIFT		4
#define ID_ISAR5_SEVL_SHIFT		0

/* mvfr0 (AArch32): field offsets within MVFR0_EL1. */
#define MVFR0_FPROUND_SHIFT		28
#define MVFR0_FPSHVEC_SHIFT		24
#define MVFR0_FPSQRT_SHIFT		20
#define MVFR0_FPDIVIDE_SHIFT		16
#define MVFR0_FPTRAP_SHIFT		12
#define MVFR0_FPDP_SHIFT		8
#define MVFR0_FPSP_SHIFT		4
#define MVFR0_SIMD_SHIFT		0

/* mvfr1 (AArch32): field offsets within MVFR1_EL1. */
#define MVFR1_SIMDFMAC_SHIFT		28
#define MVFR1_FPHP_SHIFT		24
#define MVFR1_SIMDHP_SHIFT		20
#define MVFR1_SIMDSP_SHIFT		16
#define MVFR1_SIMDINT_SHIFT		12
#define MVFR1_SIMDLS_SHIFT		8
#define MVFR1_FPDNAN_SHIFT		4
#define MVFR1_FPFTZ_SHIFT		0
244
/*
 * NOTE(review): a second, byte-identical copy of the
 * ID_AA64MMFR0_TGRAN{4,64,16}_{SHIFT,NI,SUPPORTED} definitions used to
 * live here. They duplicated the ones defined alongside the other
 * id_aa64mmfr0 fields earlier in this file and have been removed; the
 * page-size selection below keeps using the canonical definitions.
 */
256
/*
 * Pick the TGRAN field/value matching the configured kernel page size, so
 * granule support can be checked generically via ID_AA64MMFR0_TGRAN_SHIFT
 * and ID_AA64MMFR0_TGRAN_SUPPORTED. Exactly one CONFIG_ARM64_*_PAGES
 * option is expected to be set (enforced by Kconfig, not here).
 */
#if defined(CONFIG_ARM64_4K_PAGES)
#define ID_AA64MMFR0_TGRAN_SHIFT	ID_AA64MMFR0_TGRAN4_SHIFT
#define ID_AA64MMFR0_TGRAN_SUPPORTED	ID_AA64MMFR0_TGRAN4_SUPPORTED
#elif defined(CONFIG_ARM64_16K_PAGES)
#define ID_AA64MMFR0_TGRAN_SHIFT	ID_AA64MMFR0_TGRAN16_SHIFT
#define ID_AA64MMFR0_TGRAN_SUPPORTED	ID_AA64MMFR0_TGRAN16_SUPPORTED
#elif defined(CONFIG_ARM64_64K_PAGES)
#define ID_AA64MMFR0_TGRAN_SHIFT	ID_AA64MMFR0_TGRAN64_SHIFT
#define ID_AA64MMFR0_TGRAN_SUPPORTED	ID_AA64MMFR0_TGRAN64_SUPPORTED
#endif
267
#ifdef __ASSEMBLY__

/*
 * Give each general purpose register a numeric alias (x0..x30 plus xzr
 * as 31) so that mrs_s/msr_s below can OR the register number straight
 * into a hand-encoded instruction word emitted via __emit_inst.
 */
.irp num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
.equ .L__reg_num_x\num, \num
.endr
.equ .L__reg_num_xzr, 31

/* mrs_s: read the system register encoded by \sreg into \rt. */
.macro mrs_s, rt, sreg
__emit_inst(0xd5200000|(\sreg)|(.L__reg_num_\rt))
.endm

/* msr_s: write \rt to the system register encoded by \sreg. */
.macro msr_s, sreg, rt
__emit_inst(0xd5000000|(\sreg)|(.L__reg_num_\rt))
.endm
282
283 #else
284
285 #include <linux/types.h>
286
/*
 * C-side twin of the assembly definitions in the __ASSEMBLY__ branch:
 * a file-scope asm() injects the same register-number aliases and the
 * mrs_s/msr_s assembler macros into every translation unit that includes
 * this header, so inline asm can name registers the assembler itself
 * does not know about.
 */
asm(
"	.irp	num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30\n"
"	.equ	.L__reg_num_x\\num, \\num\n"
"	.endr\n"
"	.equ	.L__reg_num_xzr, 31\n"
"\n"
"	.macro	mrs_s, rt, sreg\n"
	__emit_inst(0xd5200000|(\\sreg)|(.L__reg_num_\\rt))
"	.endm\n"
"\n"
"	.macro	msr_s, sreg, rt\n"
	__emit_inst(0xd5000000|(\\sreg)|(.L__reg_num_\\rt))
"	.endm\n"
);
301
/*
 * Unlike read_cpuid, calls to read_sysreg are never expected to be
 * optimized away or replaced with synthetic values. The asm is marked
 * volatile so every invocation performs a real MRS.
 */
#define read_sysreg(r) ({					\
	u64 __val;						\
	asm volatile("mrs %0, " __stringify(r) : "=r" (__val));	\
	__val;							\
})
311
/*
 * Write v to system register r via MSR.
 *
 * The "Z" constraint normally means a zero immediate, but when combined
 * with the "%x0" template means XZR, so writing zero costs no register.
 *
 * The argument must be parenthesised in the cast: with "(u64)v" a caller
 * passing e.g. "a | b" would expand to "(u64)a | b", applying the cast to
 * only part of the expression.
 */
#define write_sysreg(v, r) do {					\
	u64 __val = (u64)(v);					\
	asm volatile("msr " __stringify(r) ", %x0"		\
		     : : "rZ" (__val));				\
} while (0)
321
/*
 * Read a system register via the mrs_s assembler macro, for registers
 * without architectural names, or simply unsupported by GAS. "r" is a
 * sys_reg() encoding rather than a register name.
 */
#define read_sysreg_s(r) ({					\
	u64 __val;						\
	asm volatile("mrs_s %0, " __stringify(r) : "=r" (__val)); \
	__val;							\
})
331
/*
 * Write v to a sys_reg()-encoded register via the msr_s assembler macro
 * (for registers GAS cannot name). As with write_sysreg(), the argument
 * is parenthesised in the cast so "(u64)(v)" covers the whole expression
 * a caller passes, not just its first operand.
 */
#define write_sysreg_s(v, r) do {					\
	u64 __val = (u64)(v);						\
	asm volatile("msr_s " __stringify(r) ", %x0" : : "rZ" (__val)); \
} while (0)
336
337 static inline void config_sctlr_el1(u32 clear, u32 set)
338 {
339 u32 val;
340
341 val = read_sysreg(sctlr_el1);
342 val &= ~clear;
343 val |= set;
344 write_sysreg(val, sctlr_el1);
345 }
346
347 #endif
348
349 #endif /* __ASM_SYSREG_H */