#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifndef STATIC
#define STATIC	static
#endif

#ifndef INLINE
#define INLINE	inline
#endif

#include <stddef.h>	/* for size_t; assumed -- the original include target is missing here */

#define __HAVE_ARCH_STRCPY
STATIC INLINE char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
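/*
 * A note on the inline-asm idiom used by the routines below: the string
 * pointers are passed in through matching constraints ("0", "1", ...) so that
 * they land in the registers the x86 string instructions expect (ESI for
 * lodsb, EDI for stosb/scasb, AL/EAX for the byte being tested), while the
 * dummy variables d0..d3 are declared as early-clobber outputs ("=&S", "=&D",
 * "=&a", "=&c") purely to tell GCC which registers the asm overwrites.  The
 * "memory" clobber keeps the compiler from caching loads and stores across
 * the asm statement.
 */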
 
#define __HAVE_ARCH_STRNCPY
STATIC INLINE char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
 
#define __HAVE_ARCH_STRCAT
STATIC INLINE char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff):"memory");
return dest;
}

#define __HAVE_ARCH_STRNCAT
STATIC INLINE char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffff), "g" (count)
	: "memory");
return dest;
}
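/*
 * How strcat/strncat find the end of 'dest': with AL preloaded to 0 and ECX
 * to 0xffffffff, "repne scasb" scans forward until it hits the terminating
 * NUL, and "decl %1" backs EDI up onto it before the append loop starts.
 * strncat copies at most 'count' bytes from 'src' and then always stores a
 * terminating NUL ("xorl %2,%2" / "stosb"), so the destination must have room
 * for up to count + 1 additional bytes.
 */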
 
#define __HAVE_ARCH_STRCMP
STATIC INLINE int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
		     :"1" (cs),"2" (ct));
return __res;
}

#define __HAVE_ARCH_STRNCMP
STATIC INLINE int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
		     :"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
		     :"1" (cs),"2" (ct),"3" (count));
return __res;
}
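/*
 * Return-value convention for strcmp/strncmp: on the first mismatching byte,
 * "sbbl %%eax,%%eax" turns the borrow from scasb into 0 or -1 and
 * "orb $1,%%al" forces that to +1 or -1; equal strings fall through to
 * "xorl %%eax,%%eax" and return 0.  Only the sign of the result is
 * meaningful, not the numeric difference between the bytes.
 */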
 
#define __HAVE_ARCH_STRCHR
STATIC INLINE char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
return __res;
}

#define __HAVE_ARCH_STRRCHR
STATIC INLINE char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
return __res;
}
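/*
 * strchr duplicates the wanted character into AH and compares it against each
 * byte fetched by lodsb.  If the string ends without a match, "%1" is set to
 * 1 so the final "movl %1,%0" / "decl %0" yields NULL; on a match it yields
 * the address of the matching byte (ESI has already advanced past it).  As in
 * the C library, searching for '\0' returns a pointer to the terminator.
 * strrchr walks the whole string and records the latest match with
 * "leal -1(%%esi),%0", returning the last occurrence or NULL.
 */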
 
#define __HAVE_ARCH_STRLEN
STATIC INLINE size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
return __res;
}
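/*
 * strlen trick: ECX starts at 0xffffffff (-1) and AL at 0, and "repne scasb"
 * decrements ECX for every byte scanned, including the terminating NUL.
 * "notl %0" / "decl %0" then turn the remaining count into the length.
 * Worked example for "abc": four bytes are scanned, leaving ECX = -5
 * (0xfffffffb); notl gives 4 and decl gives 3, which is strlen("abc").
 */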
 
static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"
	"testb $2,%b4\n\t"
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}
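/*
 * __memcpy moves n/4 dwords with "rep movsl" and then fixes up the tail by
 * testing the low bits of n: bit 1 selects an extra movsw and bit 0 an extra
 * movsb.  "%b4" is the low byte of the register holding n, and the "q"
 * constraint guarantees n sits in a register that has such a byte
 * (EAX/EBX/ECX/EDX).
 */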
 
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	switch (n) {
		case 0:
			return to;
		case 1:
			*(unsigned char *)to = *(const unsigned char *)from;
			return to;
		case 2:
			*(unsigned short *)to = *(const unsigned short *)from;
			return to;
		case 3:
			*(unsigned short *)to = *(const unsigned short *)from;
			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
			return to;
		case 4:
			*(unsigned long *)to = *(const unsigned long *)from;
			return to;
		case 6:	/* for Ethernet addresses */
			*(unsigned long *)to = *(const unsigned long *)from;
			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
			return to;
		case 8:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			return to;
		case 12:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			return to;
		case 16:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			return to;
		case 20:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
			return to;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}

#undef COMMON
}
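/*
 * Illustrative sketch, not part of the original source ('dst' and 'mac' are
 * hypothetical buffers): because the switch above runs on a compile-time
 * constant, a call such as
 *
 *	__constant_memcpy(dst, mac, 6);
 *
 * collapses into a single 32-bit store plus a 16-bit store (the "case 6"
 * branch kept for Ethernet addresses), with no loop and no call.  Sizes
 * without a dedicated case fall through to COMMON(): n/4 dwords via
 * "rep movsl" plus a movsw/movsb tail chosen by n % 4.
 */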
 
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>	/* assumed: declares _mmx_memcpy(); the original include target is missing here */
 
/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline  void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))

#else

/*
 *	No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
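/*
 * memcpy() dispatch: __builtin_constant_p(n) is resolved at compile time, so
 * a call like memcpy(dst, src, 16) with a literal length expands to
 * __constant_memcpy() (plain unrolled stores), while a length only known at
 * run time goes through __memcpy(), or through _mmx_memcpy() when
 * CONFIG_X86_USE_3DNOW is set and the copy is at least 512 bytes.
 */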
 
/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead, it's all optimized away at
 * compile time.
 */
extern void __struct_cpy_bug (void);

#define struct_cpy(x,y) 			\
({						\
	if (sizeof(*(x)) != sizeof(*(y))) 	\
		__struct_cpy_bug;		\
	memcpy(x, y, sizeof(*(x)));		\
})
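/*
 * Illustrative use of struct_cpy(); the struct and variable names below are
 * made up and not part of the original header:
 *
 *	struct point { int x, y; } a, b;
 *	struct_cpy(&a, &b);	copies b into a via memcpy()
 *
 * If the two operands had different sizes, the macro would reference
 * __struct_cpy_bug, which is declared but never defined, so the mistake is
 * intended to show up as a link error rather than at run time.
 */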
 
#define __HAVE_ARCH_MEMMOVE
STATIC INLINE void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;
if (dest<src)
__asm__ __volatile__(
	"rep\n\t"
	"movsb"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),"1" (src),"2" (dest)
	: "memory");
else
__asm__ __volatile__(
	"std\n\t"
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),
	 "1" (n-1+(const char *)src),
	 "2" (n-1+(char *)dest)
	:"memory");
return dest;
}
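/*
 * memmove handles overlapping buffers by picking the copy direction: when
 * dest lies below src it copies forward with "rep movsb"; otherwise it sets
 * the direction flag ("std"), copies backward starting from the last byte of
 * each buffer, and restores the flag with "cld" before returning.
 */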
 
#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
STATIC INLINE void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
 
static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"
	"testb $2,%b3\n\t"
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
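/*
 * __constant_c_memset expects 'c' to already be a 32-bit fill pattern (the
 * memset() macro below builds it as 0x01010101 times the fill byte).  It
 * stores count/4 dwords with "rep stosl" and then uses bit 1 and bit 0 of
 * count for the trailing stosw/stosb, mirroring the tail handling in
 * __memcpy().
 */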
 
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
STATIC INLINE size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count));
return __res;
}
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__  __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}

#undef COMMON
}
 
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
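/*
 * Illustrative expansion, not part of the original source ('buf' is a
 * hypothetical buffer): with a constant fill byte,
 *
 *	memset(buf, 0xab, n);
 *
 * first widens the byte to the pattern 0xabababab (0x01010101UL * 0xab) and
 * then picks __constant_c_and_count_memset() when n is also a compile-time
 * constant, or __constant_c_memset() when it is not.  A fill byte only known
 * at run time takes the plain "rep stosb" path in __memset_generic().
 */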
 
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
STATIC INLINE void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
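/*
 * Unlike memchr(), memscan() never returns NULL: "repnz scasb" either stops
 * on the matching byte (EDI is then decremented back onto it) or runs ECX
 * down to zero, in which case the returned pointer is addr + size, i.e. one
 * past the scanned area.
 */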
 
#endif