]> git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - arch/mips/lib/memset.S
Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/pmladek...
[mirror_ubuntu-artful-kernel.git] / arch / mips / lib / memset.S
CommitLineData
1da177e4
LT
1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
7 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
26c5e07d
SH
8 * Copyright (C) 2007 by Maciej W. Rozycki
9 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
1da177e4
LT
10 */
11#include <asm/asm.h>
048eb582 12#include <asm/asm-offsets.h>
576a2f0c 13#include <asm/export.h>
1da177e4
LT
14#include <asm/regdef.h>
15
a583158c
AN
16#if LONGSIZE == 4
17#define LONG_S_L swl
18#define LONG_S_R swr
19#else
20#define LONG_S_L sdl
21#define LONG_S_R sdr
22#endif
23
26c5e07d
SH
24#ifdef CONFIG_CPU_MICROMIPS
25#define STORSIZE (LONGSIZE * 2)
26#define STORMASK (STORSIZE - 1)
27#define FILL64RG t8
28#define FILLPTRG t7
29#undef LONG_S
30#define LONG_S LONG_SP
31#else
32#define STORSIZE LONGSIZE
33#define STORMASK LONGMASK
34#define FILL64RG a1
35#define FILLPTRG t0
36#endif
37
6d5155c2
MC
/*
 * Two assembly "modes" for the generated bzero bodies: LEGACY_MODE
 * emits the plain store instructions, EVA_MODE emits the "e"-suffixed
 * (Enhanced Virtual Addressing) variants for user mappings.  The mode
 * is selected per macro expansion via the \mode argument.
 */
38#define LEGACY_MODE 1
39#define EVA_MODE 2
40
fd9720e9
MC
41/*
42 * No need to protect it with EVA #ifdefery. The generated block of code
43 * will never be assembled if EVA is not enabled.
44 */
45#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
46#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
47
/*
 * EX(insn, reg, addr, handler): emit a store that may fault.
 * In LEGACY_MODE the plain instruction is used; otherwise the EVA
 * ("e"-suffixed) variant is generated.  The instruction's address
 * (local label 9) is recorded in the __ex_table section together with
 * the fault handler, so the fixup code can run when the store faults.
 */
1da177e4 48#define EX(insn,reg,addr,handler) \
fd9720e9
MC
49 .if \mode == LEGACY_MODE; \
509: insn reg, addr; \
51 .else; \
529: ___BUILD_EVA_INSN(insn, reg, addr); \
53 .endif; \
70342287
RB
54 .section __ex_table,"a"; \
55 PTR 9b, handler; \
1da177e4
LT
56 .previous
57
/*
 * f_fill64 dst, offset, val, fixup, mode
 *
 * Store 64 bytes of \val starting at \dst + \offset using LONG_S
 * stores, each wrapped in EX() so a fault branches to \fixup.
 * The number of stores emitted (4, 8 or 16) varies so the total is
 * always 64 bytes: STORSIZE is 2*LONGSIZE for microMIPS (paired
 * store), LONGSIZE otherwise, and LONGSIZE is 4 or 8.
 */
fd9720e9 58 .macro f_fill64 dst, offset, val, fixup, mode
26c5e07d
SH
59 EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
60 EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
61 EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
62 EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
63#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
64 EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
65 EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
66 EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
67 EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
68#endif
69#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
70 EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
71 EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
72 EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
73 EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
74 EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
75 EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
76 EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
77 EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
a583158c 78#endif
1da177e4
LT
79 .endm
80
1da177e4
LT
81 .set noreorder
82 .align 5
1da177e4 83
6d5155c2
MC
84 /*
85 * Macro to generate the __bzero{,_user} symbol
86 * Arguments:
87 * mode: LEGACY_MODE or EVA_MODE
88 */
/*
 * Register usage within the generated body:
 *   a0 = destination pointer, a1 = replicated fill word,
 *   a2 = length in bytes; returns with a2 = 0 on full success.
 * On a fault, the fixup code at the bottom returns with a count of
 * unwritten bytes in a2 (used by the user-access __bzero variants).
 * NOTE(review): that contract is inferred from the fixup arithmetic
 * below -- confirm against the __bzero/__clear_user callers.
 */
89 .macro __BUILD_BZERO mode
90 /* Initialize __memset if this is the first time we call this macro */
91 .ifnotdef __memset
92 .set __memset, 1
93 .hidden __memset /* Make sure it does not leak */
94 .endif
1da177e4 95
26c5e07d 96 sltiu t0, a2, STORSIZE /* very small region? */
6d5155c2 97 bnez t0, .Lsmall_memset\@
8483b14a 98 andi t0, a0, STORMASK /* aligned? */
1da177e4 99
26c5e07d
SH
100#ifdef CONFIG_CPU_MICROMIPS
101 move t8, a1 /* used by 'swp' instruction */
102 move t9, a1
103#endif
619b6e18 104#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
1da177e4 105 beqz t0, 1f
8483b14a 106 PTR_SUBU t0, STORSIZE /* alignment in bytes */
619b6e18
MR
107#else
108 .set noat
26c5e07d 109 li AT, STORSIZE
619b6e18 110 beqz t0, 1f
8483b14a 111 PTR_SUBU t0, AT /* alignment in bytes */
619b6e18
MR
112 .set at
113#endif
1da177e4 114
/*
 * Align a0 to a STORSIZE boundary.  Pre-R6 cores use the unaligned
 * swl/swr (sdl/sdr) stores; R6 removed those, so the R6 path stores
 * the leading bytes one at a time via STORE_BYTE.
 */
8c56208a 115#ifndef CONFIG_CPU_MIPSR6
930bff88 116 R10KCBARRIER(0(ra))
1da177e4 117#ifdef __MIPSEB__
6d5155c2 118 EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
dd2adea4 119#else
6d5155c2 120 EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
1da177e4
LT
121#endif
122 PTR_SUBU a0, t0 /* long align ptr */
123 PTR_ADDU a2, t0 /* correct size */
124
8c56208a
LY
125#else /* CONFIG_CPU_MIPSR6 */
126#define STORE_BYTE(N) \
127 EX(sb, a1, N(a0), .Lbyte_fixup\@); \
128 beqz t0, 0f; \
129 PTR_ADDU t0, 1;
130
131 PTR_ADDU a2, t0 /* correct size */
132 PTR_ADDU t0, 1
133 STORE_BYTE(0)
134 STORE_BYTE(1)
135#if LONGSIZE == 4
136 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
137#else
138 STORE_BYTE(2)
139 STORE_BYTE(3)
140 STORE_BYTE(4)
141 STORE_BYTE(5)
142 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
143#endif
1440:
145 ori a0, STORMASK
146 xori a0, STORMASK
147 PTR_ADDIU a0, STORSIZE
148#endif /* CONFIG_CPU_MIPSR6 */
1da177e4
LT
/* t1 = a2 & ~0x3f: bytes covered by whole 64-byte blocks. */
1491: ori t1, a2, 0x3f /* # of full blocks */
150 xori t1, 0x3f
6d5155c2 151 beqz t1, .Lmemset_partial\@ /* no block to fill */
8483b14a 152 andi t0, a2, 0x40-STORSIZE
1da177e4
LT
153
154 PTR_ADDU t1, a0 /* end address */
155 .set reorder
1561: PTR_ADDIU a0, 64
930bff88 157 R10KCBARRIER(0(ra))
fd9720e9 158 f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
1da177e4
LT
159 bne t1, a0, 1b
160 .set noreorder
161
/*
 * Fewer than 64 aligned bytes remain (t0 = leftover rounded down to a
 * multiple of STORSIZE).  Compute a jump into the middle of the
 * f_fill64 expansion below so exactly the needed stores execute.
 * FILLPTRG is halved first for microMIPS, whose paired stores cover
 * two longs per instruction.
 */
6d5155c2 162.Lmemset_partial\@:
930bff88 163 R10KCBARRIER(0(ra))
1da177e4 164 PTR_LA t1, 2f /* where to start */
26c5e07d
SH
165#ifdef CONFIG_CPU_MICROMIPS
166 LONG_SRL t7, t0, 1
167#endif
a583158c 168#if LONGSIZE == 4
26c5e07d 169 PTR_SUBU t1, FILLPTRG
a583158c
AN
170#else
171 .set noat
26c5e07d 172 LONG_SRL AT, FILLPTRG, 1
a583158c 173 PTR_SUBU t1, AT
619b6e18 174 .set at
a583158c 175#endif
1da177e4 176 jr t1
8483b14a 177 PTR_ADDU a0, t0 /* dest ptr */
1da177e4
LT
178
179 .set push
180 .set noreorder
181 .set nomacro
6d5155c2 182 /* ... but first do longs ... */
fd9720e9 183 f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1da177e4 1842: .set pop
26c5e07d 185 andi a2, STORMASK /* At most one long to go */
1da177e4
LT
186
187 beqz a2, 1f
8c56208a 188#ifndef CONFIG_CPU_MIPSR6
8483b14a 189 PTR_ADDU a0, a2 /* What's left */
930bff88 190 R10KCBARRIER(0(ra))
1da177e4 191#ifdef __MIPSEB__
6d5155c2 192 EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
dd2adea4 193#else
6d5155c2 194 EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
1da177e4 195#endif
8c56208a
LY
196#else
197 PTR_SUBU t0, $0, a2
198 PTR_ADDIU t0, 1
199 STORE_BYTE(0)
200 STORE_BYTE(1)
201#if LONGSIZE == 4
202 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
203#else
204 STORE_BYTE(2)
205 STORE_BYTE(3)
206 STORE_BYTE(4)
207 STORE_BYTE(5)
208 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
209#endif
2100:
211#endif
1da177e4 2121: jr ra
8483b14a 213 move a2, zero
1da177e4 214
/* Region shorter than STORSIZE: simple bytewise fill loop. */
6d5155c2 215.Lsmall_memset\@:
1da177e4 216 beqz a2, 2f
8483b14a 217 PTR_ADDU t1, a0, a2
1da177e4
LT
218
2191: PTR_ADDIU a0, 1 /* fill bytewise */
930bff88 220 R10KCBARRIER(0(ra))
1da177e4 221 bne t1, a0, 1b
8483b14a 222 sb a1, -1(a0)
1da177e4
LT
223
2242: jr ra /* done */
8483b14a 225 move a2, zero
6d5155c2 226 .if __memset == 1
1da177e4 227 END(memset)
6d5155c2
MC
228 .set __memset, 0
229 .hidden __memset
230 .endif
1da177e4 231
/*
 * Fault fixups.  Each returns via ra with a remaining-byte count in
 * a2 (.Llast_fixup instead writes a2 & STORMASK into v1).  fwd/partial
 * fixups read the faulting user address saved in the thread struct
 * (THREAD_BUADDR) and compute end - fault_address.
 */
8e85f275 232#ifdef CONFIG_CPU_MIPSR6
8c56208a
LY
/*
 * NOTE(review): a2 = 1 - t0 here; upstream later reworked this fixup
 * ("MIPS: memset.S: Fix byte_fixup for MIPSR6") for an off-by-one in
 * the reported count -- verify against a current kernel tree.
 */
233.Lbyte_fixup\@:
234 PTR_SUBU a2, $0, t0
235 jr ra
236 PTR_ADDIU a2, 1
8e85f275 237#endif /* CONFIG_CPU_MIPSR6 */
8c56208a 238

6d5155c2 239.Lfirst_fixup\@:
1da177e4 240 jr ra
8483b14a 241 nop
1da177e4 242

6d5155c2 243.Lfwd_fixup\@:
1da177e4 244 PTR_L t0, TI_TASK($28)
1da177e4 245 andi a2, 0x3f
e5674ad6 246 LONG_L t0, THREAD_BUADDR(t0)
1da177e4
LT
247 LONG_ADDU a2, t1
248 jr ra
8483b14a 249 LONG_SUBU a2, t0
1da177e4 250

/*
 * NOTE(review): this adds t1, which after the computed jump may not
 * hold an end address; upstream later changed this to use a0
 * ("MIPS: memset.S: Fix return of __clear_user from Lpartial_fixup")
 * -- verify before relying on the count returned here.
 */
6d5155c2 251.Lpartial_fixup\@:
1da177e4 252 PTR_L t0, TI_TASK($28)
26c5e07d 253 andi a2, STORMASK
e5674ad6 254 LONG_L t0, THREAD_BUADDR(t0)
1da177e4
LT
255 LONG_ADDU a2, t1
256 jr ra
8483b14a 257 LONG_SUBU a2, t0
1da177e4 258

/*
 * NOTE(review): writes v1 in the delay slot; upstream later replaced
 * this with a nop ("MIPS: memset.S: Fix clobber of v1 in last_fixup")
 * because callers did not expect v1 to be clobbered -- verify.
 */
6d5155c2 259.Llast_fixup\@:
1da177e4 260 jr ra
8483b14a 261 andi v1, a2, STORMASK
6d5155c2
MC
262
263 .endm
264
265/*
266 * memset(void *s, int c, size_t n)
267 *
268 * a0: start of area to clear
269 * a1: char to fill with
270 * a2: size of area to clear
271 */
/* Returns the original pointer (a0) in v0. */
272

273LEAF(memset)
576a2f0c 274EXPORT_SYMBOL(memset)
6d5155c2
MC
275 beqz a1, 1f
276 move v0, a0 /* result */
277
/* Replicate the low byte of a1 across the whole fill word. */
278 andi a1, 0xff /* spread fillword */
279 LONG_SLL t1, a1, 8
280 or a1, t1
281 LONG_SLL t1, a1, 16
282#if LONGSIZE == 8
283 or a1, t1
284 LONG_SLL t1, a1, 32
285#endif
286 or a1, t1
2871:
/*
 * memset falls through into the __bzero body generated below, with
 * the fill word already replicated in a1.  Without EVA one
 * legacy-mode body serves both memset and __bzero; with EVA the
 * legacy body becomes __bzero_kernel and a separate EVA-mode __bzero
 * is built for user addresses.
 */
fd9720e9 288#ifndef CONFIG_EVA
6d5155c2 289FEXPORT(__bzero)
576a2f0c 290EXPORT_SYMBOL(__bzero)
d6a428fb
JH
291#else
292FEXPORT(__bzero_kernel)
576a2f0c 293EXPORT_SYMBOL(__bzero_kernel)
fd9720e9 294#endif
6d5155c2 295 __BUILD_BZERO LEGACY_MODE
fd9720e9
MC
296
297#ifdef CONFIG_EVA
298LEAF(__bzero)
576a2f0c 299EXPORT_SYMBOL(__bzero)
fd9720e9
MC
300 __BUILD_BZERO EVA_MODE
301END(__bzero)
302#endif