#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 *
 * Also, the byte strings actually work correctly. Forget
 * the i486 routines for now as they may be broken..
 */
#if FIXED_486_STRING && defined(CONFIG_X86_USE_STRING_486)
#include <asm/string-486.h>
#else /* CONFIG_X86_USE_STRING_486 */
/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strtok,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 *		NO Copyright (C) 1991, 1992 Linus Torvalds,
 *		consider these trivial functions to be PD.
 */
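
/*
 * A note on the inline-asm idiom used below (an illustrative sketch,
 * not part of the original header): each routine declares dummy locals
 * and binds them as early-clobber outputs ("=&S", "=&D", ...) so gcc
 * knows the string instructions rewrite %esi/%edi/%eax, while the
 * "0"/"1" input constraints tie the real arguments to those same
 * registers on entry. A "memory" clobber keeps gcc from caching data
 * across the operation. Disabled example of the pattern:
 */
#if 0
static inline void example_copy_one_byte(char *dst, const char *src)
{
	int d0, d1;	/* dummy outputs: gcc learns %esi/%edi are rewritten */
	__asm__ __volatile__(
		"movsb"		/* copy one byte from (%esi) to (%edi) */
		: "=&S" (d0), "=&D" (d1)
		: "0" (src), "1" (dst)
		: "memory");
}
#endif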
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"			/* al = *src++ */
	"stosb\n\t"			/* *dest++ = al */
	"testb %%al,%%al\n\t"
	"jne 1b"			/* loop until the NUL is copied too */
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"			/* NUL-pad the remainder, as strncpy must */
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"			/* find the NUL at the end of dest */
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff):"memory");
return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"			/* find the NUL at the end of dest */
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"				/* always NUL-terminate the result */
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffff), "g" (count)
	: "memory");
return dest;
}
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"	/* -1 if borrow was set, else 0 */
	"orb $1,%%al\n"			/* force the result to -1 or +1 */
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
	:"1" (cs),"2" (ct));
return __res;
}
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count));
return __res;
}
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"			/* not found: fake %esi so result is NULL */
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
return __res;
}
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"		/* remember the latest match */
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
return __res;
}
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"			/* %ecx counted down from -1 */
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
return __res;
}
static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"		/* copy n/4 dwords */
	"testb $2,%b4\n\t"		/* then up to one word ... */
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"		/* ... and up to one byte of tail */
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant. (See the sketch after the function.)
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	switch (n) {
		case 0:
			return to;
		case 1:
			*(unsigned char *)to = *(const unsigned char *)from;
			return to;
		case 2:
			*(unsigned short *)to = *(const unsigned short *)from;
			return to;
		case 3:
			*(unsigned short *)to = *(const unsigned short *)from;
			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
			return to;
		case 4:
			*(unsigned long *)to = *(const unsigned long *)from;
			return to;
		case 6:	/* for Ethernet addresses */
			*(unsigned long *)to = *(const unsigned long *)from;
			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
			return to;
		case 8:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			return to;
		case 12:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			return to;
		case 16:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			return to;
		case 20:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
			return to;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}
#undef COMMON
}
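
/*
 * Illustrative sketch (not part of the original header): with a
 * constant size, gcc resolves the switches at compile time, so the
 * call below reduces to "rep ; movsl" for 2 dwords plus one "movsw"
 * for the tail -- no function call and no runtime size test.
 */
#if 0
static inline void example_copy10(void *d, const void *s)
{
	/* n = 10 is constant: n/4 = 2 dwords, n%4 = 2 -> "movsw" tail */
	__constant_memcpy(d, s, 10);
}
#endif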
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))
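
/*
 * Illustrative sketch (not part of the original header): why the
 * 512-byte cutoff matters. _mmx_memcpy() pays for saving and restoring
 * FPU/MMX state, which only amortizes over large copies; short and
 * compile-time-constant copies stay on the integer paths above. The
 * sizes below are made up.
 */
#if 0
static inline void example_dispatch(void *dst, const void *src, size_t runtime_len)
{
	memcpy(dst, src, 16);		/* constant: inlined word moves */
	memcpy(dst, src, runtime_len);	/* variable: __memcpy3d picks a path */
}
#endif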
#else /* CONFIG_X86_USE_3DNOW */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif /* CONFIG_X86_USE_3DNOW */
/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead, it's all optimized away at
 * compile time. (A usage sketch follows the macro below.)
 */
extern void __struct_cpy_bug (void);
#define struct_cpy(x,y) 			\
({						\
	if (sizeof(*(x)) != sizeof(*(y))) 	\
		__struct_cpy_bug();		\
	memcpy(x, y, sizeof(*(x)));		\
})
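
/*
 * Illustrative usage (not part of the original header; the type names
 * are made up). If the two structures ever diverge in size, the call
 * to the never-defined __struct_cpy_bug() survives optimization and
 * the build fails at link time; otherwise the branch is discarded and
 * only the inlined memcpy() remains.
 */
#if 0
struct ex_a { int x; int y; };
struct ex_b { int x; int y; };

static inline void example_struct_cpy(struct ex_a *dst, const struct ex_b *src)
{
	struct_cpy(dst, src);	/* same size: compiles to a plain copy */
}
#endif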
#define __HAVE_ARCH_MEMMOVE
static inline void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;
if (dest<src)
__asm__ __volatile__(
	"rep\n\t"
	"movsb"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),"1" (src),"2" (dest)
	: "memory");
else
/* copy backwards so overlapping regions are handled correctly */
__asm__ __volatile__(
	"std\n\t"
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),
	 "1" (n-1+(const char *)src),
	 "2" (n-1+(char *)dest)
	:"memory");
return dest;
}
#define memcmp __builtin_memcmp
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"			/* not found: fake %edi so result is NULL */
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time.. (A plain-C sketch of the tail handling
 * follows the function.)
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"		/* fill count/4 dwords */
	"testb $2,%b3\n\t"		/* then up to one word ... */
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"		/* ... and up to one byte of tail */
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count));
return __res;
}
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 * (See the sketch after the function.)
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}
#undef COMMON
}
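
/*
 * Illustrative sketch (not part of the original header): with both
 * pattern and count known at compile time, e.g. count = 5, gcc keeps
 * only the "rep ; stosl" branch with count/4 = 1 plus the "stosb"
 * tail -- the switches cost nothing at runtime.
 */
#if 0
static inline void example_const_memset(void *p)
{
	__constant_c_and_count_memset(p, 0x20202020UL, 5);	/* 5 spaces */
}
#endif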
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))
#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))
#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
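
/*
 * Illustrative note (not part of the original header): the
 * 0x01010101UL*(unsigned char)(c) above replicates the fill byte into
 * all four bytes of a dword so "rep ; stosl" can store it 32 bits at
 * a time; e.g. c = 0xAB gives 0x01010101 * 0xAB = 0xABABABAB.
 */
#if 0
static inline void example_memset_pattern(void *p, size_t n)
{
	unsigned char c = 0xAB;
	unsigned long pattern = 0x01010101UL * c;	/* 0xABABABAB */
	__constant_c_memset(p, pattern, n);	/* what memset(p,0xAB,n) expands to
						   when n is not a constant */
}
#endif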
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"		/* hit: step back onto the match */
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
#endif /* CONFIG_X86_USE_STRING_486 */

#endif /* __KERNEL__ */

#endif /* _I386_STRING_H_ */