/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */
#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;

	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}
	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory");
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */
static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
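/*
 * The 512-byte threshold is the point where the MMX copy is assumed to
 * win: _mmx_memcpy() has to save and restore FPU state, so short
 * copies stay on the plain integer paths above.
 */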
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
#else

/*
 *	No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);
extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif
#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}
/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
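/*
 * Illustrative example (hypothetical call, not from this header): for
 *
 *	__constant_c_and_count_memset(p, 0x20202020UL, 7);
 *
 * count/4 and count%4 fold at compile time, so only the default branch
 * survives: one "rep ; stosl" iteration followed by "stosw" and
 * "stosb".
 */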
#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))
#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
#endif /* !CONFIG_FORTIFY_SOURCE */
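/*
 * The multiply by 0x01010101UL replicates the fill byte into all four
 * byte lanes: memset(p, 0xab, n) hands the pattern 0xabababab to the
 * __constant_c_* helpers above, so whole words can be stored at once.
 */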
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
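/*
 * Unlike memchr(), which returns NULL when the byte is absent,
 * memscan() returns addr + size in that case, so the result can always
 * be compared against the end of the area.
 */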
#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */