/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

/* This header is assembler-only; reject inclusion from C translation units. */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
/*
 * Endian independent macros for shifting bytes within registers.
 *
 * get_byte_N is the shifter operand that moves byte N (in memory order)
 * of a register into bits [7:0]; put_byte_N positions bits [7:0] into
 * the slot of byte N.  The two sets are mirror images for little- and
 * big-endian kernels.
 */
#ifndef __ARMEB__
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif
/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif
/*
 * Data preload for architectures that support it (ARMv5 and above);
 * expands to nothing on older cores.
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
/*
 * Enable and disable interrupts (IRQ only, FIQs untouched).
 * ARMv6+ has dedicated cpsid/cpsie; older cores rewrite the control
 * field of the CPSR via msr (which also forces SVC mode).
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif
108 .macro asm_trace_hardirqs_off
109 #if defined(CONFIG_TRACE_IRQFLAGS)
110 stmdb sp
!, {r0
-r3
, ip
, lr
}
111 bl trace_hardirqs_off
112 ldmia sp
!, {r0
-r3
, ip
, lr
}
116 .macro asm_trace_hardirqs_on_cond
, cond
117 #if defined(CONFIG_TRACE_IRQFLAGS)
119 * actually the registers should be pushed and pop'd conditionally, but
120 * after bl the flags are certainly clobbered
122 stmdb sp
!, {r0
-r3
, ip
, lr
}
123 bl\cond trace_hardirqs_on
124 ldmia sp
!, {r0
-r3
, ip
, lr
}
128 .macro asm_trace_hardirqs_on
129 asm_trace_hardirqs_on_cond al
134 asm_trace_hardirqs_off
138 asm_trace_hardirqs_on
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask	@ v7-M has no CPSR; PRIMASK gates IRQs
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	/* As above, but without the irq-tracing hooks. */
	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm
/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq	@ trace only if IRQs become enabled
	restore_irqs_notrace \oldcpsr
	.endm
/*
 * Get current thread_info.
 *
 * thread_info lives at the base of the 8K kernel stack, so mask sp down
 * to an 8K (1 << 13) boundary.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #13	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #13		)
	mov	\rd, \rd, lsl #13
	.endm
/*
 * Mark a user-space access; a fault at 9999 branches to local label 9001
 * via the exception table.
 */
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection

/*
 * SMP alternatives: ALT_SMP() emits the SMP instruction in .text and
 * ALT_UP() records a UP replacement in .alt.smp.init that is patched
 * over it when booting a UP kernel.  Both must be exactly 4 bytes.
 */
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4	@ CP15 equivalent of isb
#endif
	.endm
/*
 * SMP data memory barrier.  \mode selects the ARM or Thumb-2 ("arm"
 * vs anything else) encoding so the ALT_UP() replacement stays the
 * same width as the SMP instruction it patches.
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
#if defined(CONFIG_CPU_V7M)
	/*
	 * setmode is used to assert to be in svc mode during boot. For v7-M
	 * this is done in __v7m_setup, so setmode can be empty here.
	 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	/* Thumb-2 msr cannot take an immediate; stage it through \reg. */
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif
/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE		@ Z set iff currently in HYP mode
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f				@ not in HYP: plain mode switch
	orr	\reg, \reg, #PSR_A_BIT		@ leaving HYP: also mask aborts
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)			@ exception-return out of HYP
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	/*
	 * One user-space access of \instr (ldr/str) at [\ptr, #\off],
	 * byte (\inc == 1) or word (\inc == 4) sized, with the faulting
	 * address registered in the exception table against \abort.
	 */
	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm
328 .macro usracc
, instr
, reg
, ptr
, inc
, cond
, rept
, abort
329 @
explicit IT instruction needed because of the label
330 @ introduced by the USER macro
337 .error
"Unsupported rept macro argument"
341 @ Slightly optimised to avoid incrementing the pointer twice
342 usraccoff \instr
, \reg
, \ptr
, \inc
, 0, \cond
, \abort
344 usraccoff \instr
, \reg
, \ptr
, \inc
, \inc
, \cond
, \abort
347 add\cond \ptr
, #\rept * \inc
#else	/* !CONFIG_THUMB2_KERNEL */

	/*
	 * ARM-mode variant: post-indexed addressing increments \ptr on
	 * each access, so the accesses can simply be .rept-ed.
	 */
	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */
372 .macro strusr
, reg
, ptr
, inc
, cond
=al
, rept
=1, abort
=9001f
373 usracc str
, \reg
, \ptr
, \inc
, \cond
, \rept
, \abort
376 .macro ldrusr
, reg
, ptr
, inc
, cond
=al
, rept
=1, abort
=9001f
377 usracc ldr
, \reg
, \ptr
, \inc
, \cond
, \rept
, \abort
/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
388 .macro check_uaccess
, addr
:req
, size
:req
, limit
:req
, tmp
:req
, bad
:req
389 #ifndef CONFIG_CPU_USE_DOMAINS
390 adds
\tmp
, \addr
, #\size - 1
391 sbcccs
\tmp
, \tmp
, \limit
396 #endif /* __ASM_ASSEMBLER_H__ */