#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 */
/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strsep,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 * NO Copyright (C) 1991, 1992 Linus Torvalds,
 * consider these trivial functions to be PD.
 */
/* AK: in fact I bet it would be better to move this stuff all out of line.
 */
#if !defined(IN_STRING_C)
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu):"memory");
return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffffu), "g" (count)
	: "memory");
return dest;
}
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
	:"1" (cs),"2" (ct));
return __res;
}
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count));
return __res;
}
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
return __res;
}
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
return __res;
}

#endif /* !IN_STRING_C */
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
return __res;
}
static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"
	"testb $2,%b4\n\t"
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	if (n <= 128)
		return __builtin_memcpy(to, from, n);
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}

#undef COMMON
}
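/*
 * Since n is a compile-time constant, n/4 and n%4 fold and exactly one
 * switch arm survives. For example (illustrative, not from this file)
 *
 *	__constant_memcpy(dst, src, 130);
 *
 * compiles down to "rep ; movsl" with %ecx = 32 followed by a single
 * movsw, with no runtime switch at all.
 */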
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */
static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
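/*
 * The 512-byte cutoff is where the MMX copy's setup cost (saving FPU
 * state around the MMX register use) is assumed to pay off; shorter
 * copies stay on the plain string instructions above.
 */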
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))
#else

/*
 *	This CPU favours generic memcpy (eg Intel)
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead, it's all optimized away at
 * compile time.
 */
extern void __struct_cpy_bug (void);
#define struct_cpy(x,y) 		\
({					\
	if (sizeof(*(x)) != sizeof(*(y))) \
		__struct_cpy_bug();	\
	memcpy(x, y, sizeof(*(x)));	\
})
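/*
 * Usage sketch (illustrative; struct foo is hypothetical): with equal
 * sizes the if folds away and only the memcpy remains; on a mismatch
 * the call to the never-defined __struct_cpy_bug() survives and the
 * link fails.
 *
 *	struct foo a, b;
 *	struct_cpy(&a, &b);	// OK, becomes a plain memcpy
 */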
#define __HAVE_ARCH_MEMMOVE
static inline void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;
if (dest<src)
	__memcpy(dest,src,n);
else
__asm__ __volatile__(
	"std\n\t"
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),
	 "1" (n-1+(const char *)src),
	 "2" (n-1+(char *)dest)
	:"memory");
return dest;
}
#define memcmp __builtin_memcmp
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"
	"testb $2,%b3\n\t"
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
/* Added by Gertjan van Wingerde to make minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count));
return __res;
}
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}

#undef COMMON
}
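/*
 * Example (illustrative): memset(p, 0, 6) with both arguments constant
 * reaches this function with pattern 0 and count 6; count/4 and count%4
 * fold, leaving one "rep ; stosl" (%ecx = 1) followed by a single stosw.
 */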
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))
#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))
#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
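/*
 * The 0x01010101UL multiply replicates the fill byte into every byte of
 * a 32-bit word, e.g. memset(buf, 0xAB, n) passes 0xABABABABUL down to
 * the stosl-based routines above.
 */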
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
#endif /* __KERNEL__ */

#endif /* _I386_STRING_H_ */