arch/x86/include/asm/bitops.h (-6)
···
  * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
  */
 
-#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
-/* Technically wrong, but this avoids compilation errors on some gcc
-   versions. */
-#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
-#else
 #define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
-#endif
 
 #define ADDR BITOP_ADDR(addr)
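
With the gcc < 4.1 fallback gone, BITOP_ADDR() always uses the read-modify-write constraint. A minimal sketch (hypothetical function, not the kernel's real bitops) of why "+m" is the correct constraint for a bit-set instruction:

/*
 * "bts" reads, modifies and writes its memory operand, so it must be
 * declared input-output ("+m").  Declaring it write-only with "=m"
 * let the compiler assume the old contents were dead -- "technically
 * wrong", as the deleted comment admits, and kept only to placate
 * gcc < 4.1.
 */
static inline void example_set_bit(long nr, volatile unsigned long *addr)
{
	asm volatile("bts %1,%0"
		     : "+m" (*(volatile long *)addr)	/* read-modify-write */
		     : "Ir" (nr)
		     : "memory");
}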
arch/x86/include/asm/string_32.h (-104)
···
  * No 3D Now!
  */
 
-#if (__GNUC__ >= 4)
 #define memcpy(t, f, n) __builtin_memcpy(t, f, n)
-#else
-#define memcpy(t, f, n)				\
-	(__builtin_constant_p((n))		\
-	 ? __constant_memcpy((t), (f), (n))	\
-	 : __memcpy((t), (f), (n)))
-#endif
 
 #endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
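
The deleted #else branch is the classic __builtin_constant_p() dispatch idiom; gcc >= 4 performs the same constant-size specialization inside __builtin_memcpy itself. A self-contained sketch of the idiom, with hypothetical helper names standing in for __constant_memcpy()/__memcpy():

#include <stddef.h>
#include <string.h>

/* Hypothetical helpers, for illustration only. */
static inline void *copy_fixed(void *t, const void *f, size_t n)
{
	return memcpy(t, f, n);		/* imagine an unrolled fixed-size copy */
}

static inline void *copy_generic(void *t, const void *f, size_t n)
{
	return memcpy(t, f, n);		/* imagine a rep-movs loop */
}

/*
 * __builtin_constant_p(n) folds to 1 when n is a compile-time
 * constant, so the specialized helper is selected at no runtime
 * cost; otherwise the generic path is taken.
 */
#define my_memcpy(t, f, n)			\
	(__builtin_constant_p(n)		\
	 ? copy_fixed((t), (f), (n))		\
	 : copy_generic((t), (f), (n)))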
···
 /* we might want to write optimized versions of these later */
 #define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
 
-/*
- * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
- * things 32 bits at a time even when we don't know the size of the
- * area at compile-time..
- */
-static __always_inline
-void *__constant_c_memset(void *s, unsigned long c, size_t count)
-{
-	int d0, d1;
-	asm volatile("rep ; stosl\n\t"
-		     "testb $2,%b3\n\t"
-		     "je 1f\n\t"
-		     "stosw\n"
-		     "1:\ttestb $1,%b3\n\t"
-		     "je 2f\n\t"
-		     "stosb\n"
-		     "2:"
-		     : "=&c" (d0), "=&D" (d1)
-		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
-		     : "memory");
-	return s;
-}
-
 /* Added by Gertjan van Wingerde to make minix and sysv module work */
 #define __HAVE_ARCH_STRNLEN
 extern size_t strnlen(const char *s, size_t count);
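
For reference, an illustrative C rendering of what the deleted __constant_c_memset() asm did (a sketch only; the real code kept everything in registers and relied on x86's tolerance of unaligned stores):

#include <stddef.h>
#include <stdint.h>

static void *constant_c_memset_sketch(void *s, unsigned long c, size_t count)
{
	uint32_t *p32 = s;
	uint8_t *p8;
	size_t i;

	for (i = 0; i < count / 4; i++)	/* rep ; stosl */
		*p32++ = (uint32_t)c;

	p8 = (uint8_t *)p32;
	if (count & 2) {		/* testb $2,%b3 ; stosw */
		*(uint16_t *)(void *)p8 = (uint16_t)c;
		p8 += 2;
	}
	if (count & 1)			/* testb $1,%b3 ; stosb */
		*p8 = (uint8_t)c;
	return s;
}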
···
 
 #define __HAVE_ARCH_STRSTR
 extern char *strstr(const char *cs, const char *ct);
-
-/*
- * This looks horribly ugly, but the compiler can optimize it totally,
- * as we by now know that both pattern and count is constant..
- */
-static __always_inline
-void *__constant_c_and_count_memset(void *s, unsigned long pattern,
-				    size_t count)
-{
-	switch (count) {
-	case 0:
-		return s;
-	case 1:
-		*(unsigned char *)s = pattern & 0xff;
-		return s;
-	case 2:
-		*(unsigned short *)s = pattern & 0xffff;
-		return s;
-	case 3:
-		*(unsigned short *)s = pattern & 0xffff;
-		*((unsigned char *)s + 2) = pattern & 0xff;
-		return s;
-	case 4:
-		*(unsigned long *)s = pattern;
-		return s;
-	}
-
-#define COMMON(x)							\
-	asm volatile("rep ; stosl"					\
-		     x							\
-		     : "=&c" (d0), "=&D" (d1)				\
-		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
-		     : "memory")
-
-	{
-		int d0, d1;
-#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
-		/* Workaround for broken gcc 4.0 */
-		register unsigned long eax asm("%eax") = pattern;
-#else
-		unsigned long eax = pattern;
-#endif
-
-		switch (count % 4) {
-		case 0:
-			COMMON("");
-			return s;
-		case 1:
-			COMMON("\n\tstosb");
-			return s;
-		case 2:
-			COMMON("\n\tstosw");
-			return s;
-		default:
-			COMMON("\n\tstosw\n\tstosb");
-			return s;
-		}
-	}
-
-#undef COMMON
-}
-
-#define __constant_c_x_memset(s, c, count)			\
-	(__builtin_constant_p(count)				\
-	 ? __constant_c_and_count_memset((s), (c), (count))	\
-	 : __constant_c_memset((s), (c), (count)))
 
 #define __memset(s, c, count)				\
 	(__builtin_constant_p(count)			\
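
The deleted COMMON() macro worked by string-literal concatenation: each count % 4 case pasted its tail instructions onto the shared "rep ; stosl" template, producing a single asm statement per case. A self-contained sketch of the same trick (illustrative names, not kernel API):

#include <stddef.h>

#define REP_STOSL(tail, dst, val, ndwords)			\
	({							\
		int d0, d1;					\
		/* adjacent literals concatenate: the tail	\
		   instructions join the shared template */	\
		asm volatile("rep ; stosl" tail			\
			     : "=&c" (d0), "=&D" (d1)		\
			     : "a" (val), "0" (ndwords),	\
			       "1" ((long)(dst))		\
			     : "memory");			\
	})

/* count == 26: six dwords via rep stosl, then one stosw for the tail. */
static void *fill26(void *s, unsigned long pattern)
{
	REP_STOSL("\n\tstosw", s, pattern, 26 / 4);
	return s;
}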
···
 #define __HAVE_ARCH_MEMSET
 extern void *memset(void *, int, size_t);
 #ifndef CONFIG_FORTIFY_SOURCE
-#if (__GNUC__ >= 4)
 #define memset(s, c, count) __builtin_memset(s, c, count)
-#else
-#define memset(s, c, count)						\
-	(__builtin_constant_p(c)					\
-	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)),	\
-				 (count))				\
-	 : __memset((s), (c), (count)))
-#endif
 #endif /* !CONFIG_FORTIFY_SOURCE */
 
 #define __HAVE_ARCH_MEMSET16
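
The removed #else branch relied on byte replication: multiplying by 0x01010101UL copies the fill byte into every lane of a 32-bit word, which is the form the dword-at-a-time helpers above expected. For example:

#include <assert.h>

int main(void)
{
	unsigned char c = 0xAB;
	unsigned long pattern = 0x01010101UL * c;

	assert(pattern == 0xABABABABUL);	/* 0xAB in each byte lane */
	return 0;
}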
arch/x86/include/asm/string_64.h (-15)
···
 extern void *memcpy(void *to, const void *from, size_t len);
 extern void *__memcpy(void *to, const void *from, size_t len);
 
-#ifndef CONFIG_FORTIFY_SOURCE
-#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
-#define memcpy(dst, src, len)					\
-({								\
-	size_t __len = (len);					\
-	void *__ret;						\
-	if (__builtin_constant_p(len) && __len >= 64)		\
-		__ret = __memcpy((dst), (src), __len);		\
-	else							\
-		__ret = __builtin_memcpy((dst), (src), __len);	\
-	__ret;							\
-})
-#endif
-#endif /* !CONFIG_FORTIFY_SOURCE */
-
 #define __HAVE_ARCH_MEMSET
 void *memset(void *s, int c, size_t n);
 void *__memset(void *s, int c, size_t n);
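
The removed x86-64 macro routed large constant-size copies to the out-of-line __memcpy(), presumably because pre-4.3 gcc expanded them poorly inline. Note how it captures (len) into __len once, so a side-effecting argument is evaluated a single time (__builtin_constant_p() never evaluates its operand). A minimal statement-expression wrapper in the same style, illustrating that single-evaluation property (hypothetical macro, not kernel code):

#include <stdio.h>
#include <stddef.h>

/* (len) is expanded into __len exactly once, so side effects in the
 * argument behave as a caller would expect. */
#define copy_once(dst, src, len)		\
({						\
	size_t __len = (len);			\
	__builtin_memcpy((dst), (src), __len);	\
})

int main(void)
{
	char dst[8], src[8] = "abcdefg";
	int n = 4;

	copy_once(dst, src, n++);	/* __len == 4; n incremented once */
	printf("n = %d\n", n);		/* prints "n = 5" */
	return 0;
}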