ChangeSet 1.1343, 2005/04/20 18:46:27+01:00, kaf24@xxxxxxxxxxxxxxxxxxxx
Make the fallback memset/memcpy/memmove routines concrete, in-place
implementations; otherwise we end up with a recursive dependency when
building for x86/64.
Signed-off-by: Keir Fraser <keir@xxxxxxxxxxxxx>
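
For context: the hazard here is that a plain-C fallback is fair game for
GCC's builtin/loop-idiom recognition. Roughly (an illustrative sketch,
not the Xen sources; the function name is hypothetical), a loop like the
one below can be pattern-matched and compiled into a call to memcpy()
itself, which recurses if this *is* the memcpy() being built. The
inline-asm bodies in the patch sidestep that.

    #include <stddef.h>

    /* Hypothetical fallback, for illustration only. GCC may recognise
     * this byte-copy loop and emit "call memcpy" for it -- fatal when
     * this function is itself the memcpy() being compiled. */
    void *fallback_memcpy(void *dest, const void *src, size_t n)
    {
        char *d = dest;
        const char *s = src;

        while ( n-- != 0 )
            *d++ = *s++;

        return dest;
    }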
string.c | 48 ++++++++++++++++++++++++++++++++++++++++++------
1 files changed, 42 insertions(+), 6 deletions(-)
diff -Nru a/xen/arch/x86/string.c b/xen/arch/x86/string.c
--- a/xen/arch/x86/string.c 2005-04-20 14:03:59 -04:00
+++ b/xen/arch/x86/string.c 2005-04-20 14:03:59 -04:00
@@ -9,19 +9,55 @@
 #include <xen/lib.h>
 
 #undef memmove
-void *memmove(void *dest, const void *src, size_t count)
+void *memmove(void *dest, const void *src, size_t n)
 {
-    return __memmove(dest, src, count);
+    long d0, d1, d2;                 /* dummies for the clobbered regs */
+
+    if ( dest < src )
+        return memcpy(dest, src, n); /* forward copy is overlap-safe */
+
+    __asm__ __volatile__ (
+        "   std         ; "          /* set DF: copy backwards...      */
+        "   rep ; movsb ; "          /* ...from the last byte down     */
+        "   cld           "          /* restore the forward direction  */
+        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
+        : "0" (n), "1" (n-1+(const char *)src), "2" (n-1+(char *)dest)
+        : "memory");
+
+    return dest;
 }
 
 #undef memcpy
-void *memcpy(void *dest, const void *src, size_t count)
+void *memcpy(void *dest, const void *src, size_t n)
 {
-    return __memcpy(dest, src, count);
+    long d0, d1, d2;                 /* dummies for the clobbered regs */
+
+    __asm__ __volatile__ (
+        "   rep ; movsl  ; "         /* copy n/4 longwords             */
+        "   testb $2,%b4 ; "         /* word left in the tail?         */
+        "   je 1f        ; "
+        "   movsw        ; "
+        "1: testb $1,%b4 ; "         /* byte left in the tail?         */
+        "   je 2f        ; "
+        "   movsb        ; "
+        "2:                "
+        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
+        : "0" (n/4), "q" (n), "1" (dest), "2" (src)
+        : "memory");
+
+    return dest;
 }
 
 #undef memset
-void *memset(void *s, int c, size_t count)
+void *memset(void *s, int c, size_t n)
 {
-    return __memset(s, c, count);
+    long d0, d1;                     /* dummies for the clobbered regs */
+
+    __asm__ __volatile__ (
+        "rep ; stosb"                /* store the byte in %al n times  */
+        : "=&c" (d0), "=&D" (d1)
+        : "a" (c), "1" (s), "0" (n)
+        : "memory");
+
+    return s;
 }
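
As a quick sanity check of the overlap semantics (a minimal
hosted-environment sketch, not part of the patch -- Xen itself is
freestanding, so this is only runnable against an ordinary libc):

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        char buf[8] = "abcdef";

        /* dest > src and the regions overlap, so this must go through
         * memmove(); the patch handles this case with a backwards
         * (std; rep movsb; cld) copy. */
        memmove(buf + 1, buf, 6);

        printf("%s\n", buf);    /* prints "aabcdef" */
        return 0;
    }

The memcpy() path copies n/4 longwords with rep movsl, then uses
testb $2 and testb $1 on the low bits of n to mop up the trailing word
and byte.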