git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - arch/mips/lib/memset.S
Merge commit 'c0053bd50af5' into omap-for-v4.8/soc
[mirror_ubuntu-artful-kernel.git] / arch / mips / lib / memset.S
CommitLineData
1da177e4
LT
1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
7 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
26c5e07d
SH
8 * Copyright (C) 2007 by Maciej W. Rozycki
9 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
1da177e4
LT
10 */
11#include <asm/asm.h>
048eb582 12#include <asm/asm-offsets.h>
1da177e4
LT
13#include <asm/regdef.h>
14
a583158c
AN
15#if LONGSIZE == 4
16#define LONG_S_L swl
17#define LONG_S_R swr
18#else
19#define LONG_S_L sdl
20#define LONG_S_R sdr
21#endif
22
26c5e07d
SH
23#ifdef CONFIG_CPU_MICROMIPS
24#define STORSIZE (LONGSIZE * 2)
25#define STORMASK (STORSIZE - 1)
26#define FILL64RG t8
27#define FILLPTRG t7
28#undef LONG_S
29#define LONG_S LONG_SP
30#else
31#define STORSIZE LONGSIZE
32#define STORMASK LONGMASK
33#define FILL64RG a1
34#define FILLPTRG t0
35#endif
36
6d5155c2
MC
37#define LEGACY_MODE 1
38#define EVA_MODE 2
39
fd9720e9
MC
40/*
41 * No need to protect it with EVA #ifdefery. The generated block of code
42 * will never be assembled if EVA is not enabled.
43 */
44#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
45#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
46
/*
 * EX(insn, reg, addr, handler): emit one store together with a
 * __ex_table entry for it.  When the enclosing macro is expanded with
 * \mode == LEGACY_MODE the plain instruction is emitted; otherwise the
 * EVA ("e"-suffixed) form is built via ___BUILD_EVA_INSN.  The "9:"
 * label marks the potentially faulting store; the exception-table
 * entry routes a fault at that address to \handler.
 */
1da177e4 47#define EX(insn,reg,addr,handler) \
fd9720e9
MC
48 .if \mode == LEGACY_MODE; \
499: insn reg, addr; \
50 .else; \
519: ___BUILD_EVA_INSN(insn, reg, addr); \
52 .endif; \
70342287
RB
53 .section __ex_table,"a"; \
54 PTR 9b, handler; \
1da177e4
LT
55 .previous
56
/*
 * f_fill64 dst, offset, val, fixup, mode
 *
 * Store 64 bytes of \val starting at \dst + \offset using LONG_S,
 * each store wrapped in EX() so a fault branches to \fixup.
 * The number of stores is 64/STORSIZE: 4 are always emitted, 4 more
 * unless (microMIPS && LONGSIZE == 8, i.e. STORSIZE == 16), and a
 * further 8 only for (!microMIPS && LONGSIZE == 4, STORSIZE == 4).
 */
fd9720e9 57 .macro f_fill64 dst, offset, val, fixup, mode
26c5e07d
SH
58 EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
59 EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
60 EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
61 EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
62#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
63 EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
64 EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
65 EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
66 EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
67#endif
68#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
69 EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
70 EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
71 EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
72 EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
73 EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
74 EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
75 EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
76 EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
a583158c 77#endif
1da177e4
LT
78 .endm
79
1da177e4
LT
80 .set noreorder
81 .align 5
1da177e4 82
6d5155c2
MC
83 /*
84 * Macro to generate the __bzero{,_user} symbol
85 * Arguments:
86 * mode: LEGACY_MODE or EVA_MODE
87 */
88 .macro __BUILD_BZERO mode
89 /* Initialize __memset if this is the first time we call this macro */
90 .ifnotdef __memset
91 .set __memset, 1
92 .hidden __memset /* Make sure it does not leak */
93 .endif
/*
 * Register use inside this macro:
 *   a0 = destination pointer (advances as stores are issued)
 *   a1 = fill word (each byte already replicated by memset)
 *   a2 = byte count; the fixup paths below rewrite it into the number
 *        of bytes NOT written before returning (0 on full success)
 *   t0, t1 = scratch (alignment amount, block end / jump target)
 */
1da177e4 94
26c5e07d 95 sltiu t0, a2, STORSIZE /* very small region? */
6d5155c2 96 bnez t0, .Lsmall_memset\@
8483b14a 97 andi t0, a0, STORMASK /* aligned? */
1da177e4 98
26c5e07d
SH
99#ifdef CONFIG_CPU_MICROMIPS
100 move t8, a1 /* used by 'swp' instruction */
101 move t9, a1
102#endif
619b6e18 103#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
1da177e4 104 beqz t0, 1f
8483b14a 105 PTR_SUBU t0, STORSIZE /* alignment in bytes */
619b6e18
MR
106#else
107 .set noat
26c5e07d 108 li AT, STORSIZE
619b6e18 109 beqz t0, 1f
8483b14a 110 PTR_SUBU t0, AT /* alignment in bytes */
619b6e18
MR
111 .set at
112#endif
1da177e4 113
/* Align a0 to a STORSIZE boundary with one unaligned store (pre-R6). */
8c56208a 114#ifndef CONFIG_CPU_MIPSR6
930bff88 115 R10KCBARRIER(0(ra))
1da177e4 116#ifdef __MIPSEB__
6d5155c2 117 EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
dd2adea4 118#else
6d5155c2 119 EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
1da177e4
LT
120#endif
121 PTR_SUBU a0, t0 /* long align ptr */
122 PTR_ADDU a2, t0 /* correct size */
123
/* MIPSR6 has no swl/sdl: align byte-by-byte instead. */
8c56208a
LY
124#else /* CONFIG_CPU_MIPSR6 */
125#define STORE_BYTE(N) \
126 EX(sb, a1, N(a0), .Lbyte_fixup\@); \
127 beqz t0, 0f; \
128 PTR_ADDU t0, 1;
129
130 PTR_ADDU a2, t0 /* correct size */
131 PTR_ADDU t0, 1
132 STORE_BYTE(0)
133 STORE_BYTE(1)
134#if LONGSIZE == 4
135 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
136#else
137 STORE_BYTE(2)
138 STORE_BYTE(3)
139 STORE_BYTE(4)
140 STORE_BYTE(5)
141 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
142#endif
1430:
144 ori a0, STORMASK
145 xori a0, STORMASK
146 PTR_ADDIU a0, STORSIZE
147#endif /* CONFIG_CPU_MIPSR6 */
/* Fill whole 64-byte blocks. */
1da177e4
LT
1481: ori t1, a2, 0x3f /* # of full blocks */
149 xori t1, 0x3f
6d5155c2 150 beqz t1, .Lmemset_partial\@ /* no block to fill */
8483b14a 151 andi t0, a2, 0x40-STORSIZE
1da177e4
LT
152
153 PTR_ADDU t1, a0 /* end address */
154 .set reorder
1551: PTR_ADDIU a0, 64
930bff88 156 R10KCBARRIER(0(ra))
fd9720e9 157 f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
1da177e4
LT
158 bne t1, a0, 1b
159 .set noreorder
160
/* Jump into the middle of f_fill64 to store the remaining longs. */
6d5155c2 161.Lmemset_partial\@:
930bff88 162 R10KCBARRIER(0(ra))
1da177e4 163 PTR_LA t1, 2f /* where to start */
26c5e07d
SH
164#ifdef CONFIG_CPU_MICROMIPS
165 LONG_SRL t7, t0, 1
166#endif
a583158c 167#if LONGSIZE == 4
26c5e07d 168 PTR_SUBU t1, FILLPTRG
a583158c
AN
169#else
170 .set noat
26c5e07d 171 LONG_SRL AT, FILLPTRG, 1
a583158c 172 PTR_SUBU t1, AT
619b6e18 173 .set at
a583158c 174#endif
1da177e4 175 jr t1
8483b14a 176 PTR_ADDU a0, t0 /* dest ptr */
1da177e4
LT
177
178 .set push
179 .set noreorder
180 .set nomacro
6d5155c2 181 /* ... but first do longs ... */
fd9720e9 182 f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1da177e4 1832: .set pop
26c5e07d 184 andi a2, STORMASK /* At most one long to go */
1da177e4
LT
185
186 beqz a2, 1f
/* Store the trailing sub-long bytes. */
8c56208a 187#ifndef CONFIG_CPU_MIPSR6
8483b14a 188 PTR_ADDU a0, a2 /* What's left */
930bff88 189 R10KCBARRIER(0(ra))
1da177e4 190#ifdef __MIPSEB__
6d5155c2 191 EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
dd2adea4 192#else
6d5155c2 193 EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
1da177e4 194#endif
8c56208a
LY
195#else
196 PTR_SUBU t0, $0, a2
197 PTR_ADDIU t0, 1
198 STORE_BYTE(0)
199 STORE_BYTE(1)
200#if LONGSIZE == 4
201 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
202#else
203 STORE_BYTE(2)
204 STORE_BYTE(3)
205 STORE_BYTE(4)
206 STORE_BYTE(5)
207 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
208#endif
2090:
210#endif
1da177e4 2111: jr ra
8483b14a 212 move a2, zero
1da177e4 213
/* Region shorter than STORSIZE: plain bytewise fill. */
6d5155c2 214.Lsmall_memset\@:
1da177e4 215 beqz a2, 2f
8483b14a 216 PTR_ADDU t1, a0, a2
1da177e4
LT
217
2181: PTR_ADDIU a0, 1 /* fill bytewise */
930bff88 219 R10KCBARRIER(0(ra))
1da177e4 220 bne t1, a0, 1b
/*
 * FIX: wrap the store in EX() so that a fault in __bzero's small path
 * is handled via .Lsmall_fixup instead of being an unhandled user
 * access (upstream "MIPS: memset.S: EVA & fault support for
 * small_memset").
 */
8483b14a 221 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
1da177e4
LT
222
2232: jr ra /* done */
8483b14a 224 move a2, zero
6d5155c2 225 .if __memset == 1
1da177e4 226 END(memset)
6d5155c2
MC
227 .set __memset, 0
228 .hidden __memset
229 .endif
1da177e4 230
/* Fault handlers: leave "bytes not stored" in a2 and return. */
8e85f275 231#ifdef CONFIG_CPU_MIPSR6
8c56208a
LY
232.Lbyte_fixup\@:
/*
 * FIX: unset bytes = a2 - t0 + 1 (t0 counts unaligned bytes already
 * stored, biased by one).  The old "PTR_SUBU a2, $0, t0" computed
 * 1 - t0 and returned a bogus count (upstream "MIPS: memset.S: Fix
 * byte_fixup for MIPSR6").
 */
233 PTR_SUBU a2, t0
234 jr ra
235 PTR_ADDIU a2, 1
8e85f275 236#endif /* CONFIG_CPU_MIPSR6 */
8c56208a 237
/* Faulted on the first (alignment) store: nothing to undo. */
6d5155c2 238.Lfirst_fixup\@:
1da177e4 239 jr ra
8483b14a 240 nop
1da177e4 241
/* Fault inside the 64-byte block loop: t1 is the block end address. */
6d5155c2 242.Lfwd_fixup\@:
1da177e4 243 PTR_L t0, TI_TASK($28)
1da177e4 244 andi a2, 0x3f
e5674ad6 245 LONG_L t0, THREAD_BUADDR(t0)
1da177e4
LT
246 LONG_ADDU a2, t1
247 jr ra
8483b14a 248 LONG_SUBU a2, t0
1da177e4 249
6d5155c2 250.Lpartial_fixup\@:
1da177e4 251 PTR_L t0, TI_TASK($28)
26c5e07d 252 andi a2, STORMASK
e5674ad6 253 LONG_L t0, THREAD_BUADDR(t0)
/*
 * FIX: add the destination pointer a0, not t1 -- in this path t1
 * holds the computed jump target into f_fill64, not an end address,
 * so using it corrupted the returned count (upstream "MIPS: memset.S:
 * Fix return of __clear_user from Lpartial_fixup").
 */
1da177e4
LT
254 LONG_ADDU a2, a0
255 jr ra
8483b14a 256 LONG_SUBU a2, t0
1da177e4 257
6d5155c2 258.Llast_fixup\@:
/*
 * FIX: a2 already holds the residual byte count when the last store
 * faults; the old "andi v1, a2, STORMASK" needlessly clobbered v1,
 * which callers depend on (upstream "MIPS: memset.S: Fix clobber of
 * v1 in last_fixup").
 */
1da177e4 259 jr ra
8483b14a 260 nop

/* Fault in the small_memset loop: t1 is the end address. */
.Lsmall_fixup\@:
 PTR_SUBU a2, t1, a0 /* bytes not yet stored */
 jr ra
 PTR_ADDIU a2, 1
6d5155c2
MC
261
262 .endm
263
264/*
265 * memset(void *s, int c, size_t n)
266 *
267 * a0: start of area to clear
268 * a1: char to fill with
269 * a2: size of area to clear
270 */
271
/* Returns the original destination pointer in v0, like C memset(). */
272LEAF(memset)
273 beqz a1, 1f
274 move v0, a0 /* result */
275
/* Replicate the low byte of a1 into every byte of the fill word. */
276 andi a1, 0xff /* spread fillword */
277 LONG_SLL t1, a1, 8
278 or a1, t1
279 LONG_SLL t1, a1, 16
280#if LONGSIZE == 8
281 or a1, t1
282 LONG_SLL t1, a1, 32
283#endif
284 or a1, t1
2851:
/*
 * Without EVA, __bzero shares this body.  With EVA, the kernel-only
 * variant is exported as __bzero_kernel here, while __bzero gets a
 * separate EVA_MODE expansion further down.
 */
fd9720e9 286#ifndef CONFIG_EVA
6d5155c2 287FEXPORT(__bzero)
d6a428fb
JH
288#else
289FEXPORT(__bzero_kernel)
fd9720e9 290#endif
6d5155c2 291 __BUILD_BZERO LEGACY_MODE
fd9720e9
MC
292
/*
 * EVA user-space __bzero: same body expanded with EVA_MODE so every
 * store is emitted as its "e"-suffixed EVA form via EX().
 */
293#ifdef CONFIG_EVA
294LEAF(__bzero)
295 __BUILD_BZERO EVA_MODE
296END(__bzero)
297#endif