/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

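/*
 * Pick the partial-store instructions used for unaligned edges:
 * swl/swr write the left/right part of a 32-bit word, sdl/sdr do
 * the same for a 64-bit doubleword.
 */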
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

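/*
 * On microMIPS, LONG_S is redefined to the store-pair form, so one
 * store moves two longs at once: STORSIZE doubles and the fill value
 * must be replicated into both t8 and t9 (see the moves below).
 */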
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

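/*
 * EX emits one store (or, in EVA mode, its user-address variant built
 * via ___BUILD_EVA_INSN) and records the instruction's address in the
 * __ex_table section, so a fault on a bad user address is redirected
 * to the given fixup handler instead of killing the kernel.
 */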
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

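/*
 * Fill one 64-byte block relative to \dst: the #if ladder emits just
 * enough STORSIZE-wide stores (8, 16 or 4 depending on LONGSIZE and
 * microMIPS) to cover exactly 64 bytes in every configuration.
 */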
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu	t0, a2, STORSIZE	/* very small region? */
	bnez	t0, .Lsmall_memset\@
	andi	t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move	t8, a1			/* used by 'swp' instruction */
	move	t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz	t0, 1f
	PTR_SUBU	t0, STORSIZE	/* alignment in bytes */
#else
	.set	noat
	li	AT, STORSIZE
	beqz	t0, 1f
	PTR_SUBU	t0, AT		/* alignment in bytes */
	.set	at
#endif

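	/*
	 * Store one unaligned long to bring a0 up to a STORSIZE
	 * boundary.  Big-endian needs the "left" partial store and
	 * little-endian the "right" one, so that only the bytes from
	 * a0 up to the boundary are written.
	 */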
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

1:	ori	t1, a2, 0x3f		/* round a2 down to a multiple of 64: */
	xori	t1, 0x3f		/* t1 = bytes covered by full blocks */
	beqz	t1, .Lmemset_partial\@	/* no block to fill */
	andi	t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0		/* end address */
	.set	reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64	a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne	t1, a0, 1b
	.set	noreorder

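	/*
	 * Partial block: jump into the middle of an f_fill64 expansion
	 * so that exactly the stores needed for the remaining full
	 * longs (t0 bytes) are executed.  With 4-byte longs each store
	 * is one 4-byte instruction, so the entry point is simply
	 * 2f - t0; with 8-byte stores (including paired microMIPS
	 * stores) each instruction covers twice as much data, hence
	 * the shift right by one.
	 */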
.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA	t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set	noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set	at
#endif
	jr	t1
	PTR_ADDU	a0, t0		/* dest ptr */

	.set	push
	.set	noreorder
	.set	nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set	pop
	andi	a2, STORMASK		/* At most one long to go */

	beqz	a2, 1f
	PTR_ADDU	a0, a2		/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
1:	jr	ra
	move	a2, zero

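	/*
	 * Regions shorter than one store unit are filled bytewise;
	 * t1 marks the end address.
	 */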
.Lsmall_memset\@:
	beqz	a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne	t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)	/* user access may fault */

2:	jr	ra				/* done */
	move	a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

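	/*
	 * Fault fixups: each handler computes the number of bytes that
	 * were NOT stored and returns it in a2, as __bzero/__clear_user
	 * require.  THREAD_BUADDR holds the faulting address saved by
	 * the page fault handler.
	 */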
.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, 0x3f
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr	ra
	LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, STORMASK
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0		/* a0 is the end address; t1 holds a code address here */
	jr	ra
	LONG_SUBU	a2, t0

.Llast_fixup\@:
	jr	ra			/* a2 already holds the byte count left */
	nop				/* must not clobber v1 in the delay slot */

.Lsmall_fixup\@:
	PTR_SUBU	a2, t1, a0	/* t1 is the end address */
	jr	ra
	PTR_ADDIU	a2, 1		/* the faulting byte itself was not stored */

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
	beqz	a1, 1f
	move	v0, a0			/* result */

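	/*
	 * Replicate the fill byte into every byte of a long:
	 * a1 |= a1 << 8; a1 |= a1 << 16; and on 64-bit kernels
	 * also a1 |= a1 << 32.
	 */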
	andi	a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or	a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or	a1, t1
1:
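/*
 * Without EVA, user and kernel accesses use the same instructions, so
 * __bzero can simply alias the LEGACY_MODE body generated here.  With
 * EVA, a separate __bzero is built below whose stores use the EVA
 * user-mode variants selected by EX().
 */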
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif