/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  S390 version
 *    Copyright IBM Corp. 1999
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */

void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

#ifndef CONFIG_KASAN
#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLCPY	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRRCHR	/* arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */

/* Prototypes for non-inlined arch strings functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
size_t strlcpy(char *dest, const char *src, size_t size);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strrchr(const char *s, int c);
char *strstr(const char *s1, const char *s2);
#endif /* !CONFIG_KASAN */

#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

extern void *__memcpy(void *dest, const void *src, size_t n);
extern void *__memset(void *s, int c, size_t n);
extern void *__memmove(void *dest, const void *src, size_t n);

/*
 * Files that are not instrumented (e.g. mm/slub.c) should use the
 * non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}

static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}

static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}

#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

#ifdef __HAVE_ARCH_MEMCHR
static inline void *memchr(const void * s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"
		"	jo	0b\n"
		"	jl	1f\n"
		"	la	%0,0\n"
		"1:"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_MEMSCAN
static inline void *memscan(void *s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"
		"	jo	0b\n"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_STRCAT
static inline char *strcat(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	unsigned long dummy;
	char *ret = dst;

	asm volatile(
		"0:	srst	%0,%1\n"
		"	jo	0b\n"
		"1:	mvst	%0,%2\n"
		"	jo	1b"
		: "=&a" (dummy), "+a" (dst), "+a" (src)
		: "d" (r0), "0" (0) : "cc", "memory" );
	return ret;
}
#endif

#ifdef __HAVE_ARCH_STRCPY
static inline char *strcpy(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	char *ret = dst;

	asm volatile(
		"0:	mvst	%0,%1\n"
		"	jo	0b"
		: "+&a" (dst), "+&a" (src) : "d" (r0)
		: "cc", "memory");
	return ret;
}
#endif

#ifdef __HAVE_ARCH_STRLEN
static inline size_t strlen(const char *s)
{
	register unsigned long r0 asm("0") = 0;
	const char *tmp = s;

	asm volatile(
		"0:	srst	%0,%1\n"
		"	jo	0b"
		: "+d" (r0), "+a" (tmp) : : "cc", "memory");
	return r0 - (unsigned long) s;
}
#endif

#ifdef __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t n)
{
	register int r0 asm("0") = 0;
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"
		"	jo	0b"
		: "+a" (end), "+a" (tmp) : "d" (r0) : "cc", "memory");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
void *memchr(const void * s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char * s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* __S390_STRING_H_ */
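
The memset16/memset32/memset64 wrappers above take an element count rather than a byte count and scale it by sizeof(v) before calling the byte-sized __memset* arch routines. A minimal user-space sketch of that contract follows, with a plain C loop standing in for the s390 arch implementation; memset32_fallback is a hypothetical name used only for illustration and is not part of this header.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Hypothetical portable stand-in for the arch routine: fills "count"
 * 32-bit elements (not bytes) with the value v, mirroring the element
 * count semantics of the kernel's memset32() wrapper.
 */
static void *memset32_fallback(uint32_t *s, uint32_t v, size_t count)
{
	size_t i;

	for (i = 0; i < count; i++)
		s[i] = v;
	return s;
}

int main(void)
{
	uint32_t pixels[8];

	/* Fill all eight 32-bit slots with the same pattern. */
	memset32_fallback(pixels, 0xdeadbeefU, 8);
	printf("%08x %08x\n", (unsigned) pixels[0], (unsigned) pixels[7]);
	return 0;
}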