To: xen-changelog@xxxxxxxxxxxxxxxxxxx
Subject: [Xen-changelog] [xen-unstable] x86, spinlock: Get rid of .text.lock out-of-line section.
From: Xen patchbot-unstable <patchbot-unstable@xxxxxxxxxxxxxxxxxxx>
Date: Wed, 22 Oct 2008 07:50:47 -0700
Delivery-date: Wed, 22 Oct 2008 07:52:52 -0700
Envelope-to: www-data@xxxxxxxxxxxxxxxxxxx
List-help: <mailto:xen-changelog-request@lists.xensource.com?subject=help>
List-id: BK change log <xen-changelog.lists.xensource.com>
List-post: <mailto:xen-changelog@lists.xensource.com>
List-subscribe: <http://lists.xensource.com/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=subscribe>
List-unsubscribe: <http://lists.xensource.com/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=unsubscribe>
Reply-to: xen-devel@xxxxxxxxxxxxxxxxxxx
Sender: xen-changelog-bounces@xxxxxxxxxxxxxxxxxxx
# HG changeset patch
# User Keir Fraser <keir.fraser@xxxxxxxxxx>
# Date 1224519475 -3600
# Node ID 7989e3999e8392751f735abe936af58443f776d7
# Parent  54d74fc0037ce688e79759ca632d3918f7aaa399
x86, spinlock: Get rid of .text.lock out-of-line section.

We don't care about code bloat now that spinlock operations are not
inlined into callers. This will make backtraces easier to read.

Signed-off-by: Keir Fraser <keir.fraser@xxxxxxxxxx>
---
 xen/arch/x86/x86_32/xen.lds.S  |    1 -
 xen/arch/x86/x86_64/xen.lds.S  |    1 -
 xen/include/asm-x86/rwlock.h   |   32 ++++++++++----------------------
 xen/include/asm-x86/spinlock.h |   17 ++++++++---------
 4 files changed, 18 insertions(+), 33 deletions(-)

diff -r 54d74fc0037c -r 7989e3999e83 xen/arch/x86/x86_32/xen.lds.S
--- a/xen/arch/x86/x86_32/xen.lds.S     Mon Oct 20 17:16:45 2008 +0100
+++ b/xen/arch/x86/x86_32/xen.lds.S     Mon Oct 20 17:17:55 2008 +0100
@@ -26,7 +26,6 @@ SECTIONS
        *(.fixup)
        *(.gnu.warning)
        } :text =0x9090
-  .text.lock : { *(.text.lock) } :text /* out-of-line lock text */
 
   _etext = .;                  /* End of text section */
 
diff -r 54d74fc0037c -r 7989e3999e83 xen/arch/x86/x86_64/xen.lds.S
--- a/xen/arch/x86/x86_64/xen.lds.S     Mon Oct 20 17:16:45 2008 +0100
+++ b/xen/arch/x86/x86_64/xen.lds.S     Mon Oct 20 17:17:55 2008 +0100
@@ -24,7 +24,6 @@ SECTIONS
        *(.fixup)
        *(.gnu.warning)
        } :text = 0x9090
-  .text.lock : { *(.text.lock) } :text /* out-of-line lock text */
 
   _etext = .;                  /* End of text section */
 
diff -r 54d74fc0037c -r 7989e3999e83 xen/include/asm-x86/rwlock.h
--- a/xen/include/asm-x86/rwlock.h      Mon Oct 20 17:16:45 2008 +0100
+++ b/xen/include/asm-x86/rwlock.h      Mon Oct 20 17:17:55 2008 +0100
@@ -22,25 +22,19 @@
 
 #define __build_read_lock_ptr(rw, helper)   \
        asm volatile(LOCK "subl $1,(%0)\n\t" \
-                    "js 2f\n" \
+                    "jns 1f\n\t" \
+                    "call " helper "\n\t" \
                     "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tcall " helper "\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
                     ::"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper)   \
        asm volatile(LOCK "subl $1,%0\n\t" \
-                    "js 2f\n" \
-                    "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tpush %%"__OP"ax\n\t" \
+                    "jns 1f\n\t" \
+                    "push %%"__OP"ax\n\t" \
                     "lea %0,%%"__OP"ax\n\t" \
                     "call " helper "\n\t" \
                     "pop %%"__OP"ax\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
+                    "1:\n" \
                     :"=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper)  do { \
@@ -52,25 +46,19 @@
 
 #define __build_write_lock_ptr(rw, helper) \
        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jnz 2f\n" \
+                    "jz 1f\n\t" \
+                    "call " helper "\n\t" \
                     "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tcall " helper "\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
                     ::"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jnz 2f\n" \
-                    "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tpush %%"__OP"ax\n\t" \
+                    "jz 1f\n\t" \
+                    "push %%"__OP"ax\n\t" \
                     "lea %0,%%"__OP"ax\n\t" \
                     "call " helper "\n\t" \
                     "pop %%"__OP"ax\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
+                    "1:\n" \
                     :"=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_write_lock(rw, helper) do { \
diff -r 54d74fc0037c -r 7989e3999e83 xen/include/asm-x86/spinlock.h
--- a/xen/include/asm-x86/spinlock.h    Mon Oct 20 17:16:45 2008 +0100
+++ b/xen/include/asm-x86/spinlock.h    Mon Oct 20 17:17:55 2008 +0100
@@ -18,14 +18,13 @@ static inline void _raw_spin_lock(raw_sp
 static inline void _raw_spin_lock(raw_spinlock_t *lock)
 {
     asm volatile (
-        "1:  lock; decb %0         \n"
-        "    js 2f                 \n"
-        ".section .text.lock,\"ax\"\n"
+        "1:  lock; decw %0         \n"
+        "    jns 3f                \n"
         "2:  rep; nop              \n"
-        "    cmpb $0,%0            \n"
+        "    cmpw $0,%0            \n"
         "    jle 2b                \n"
         "    jmp 1b                \n"
-        ".previous"
+        "3:"
         : "=m" (lock->lock) : : "memory" );
 }
 
@@ -33,16 +32,16 @@ static inline void _raw_spin_unlock(raw_
 {
     ASSERT(_raw_spin_is_locked(lock));
     asm volatile (
-        "movb $1,%0" 
+        "movw $1,%0" 
         : "=m" (lock->lock) : : "memory" );
 }
 
 static inline int _raw_spin_trylock(raw_spinlock_t *lock)
 {
-    char oldval;
+    s16 oldval;
     asm volatile (
-        "xchgb %b0,%1"
-        :"=q" (oldval), "=m" (lock->lock)
+        "xchgw %w0,%1"
+        :"=r" (oldval), "=m" (lock->lock)
         :"0" (0) : "memory" );
     return (oldval > 0);
 }
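
[Archive note] For anyone reading this change from the archive: the spin-wait slow path
used to be emitted into a separate .text.lock section (collected by the linker-script
lines removed above), so a CPU stuck spinning showed an address in .text.lock rather
than in the locking function itself. With the slow path inline between local labels,
backtraces land in _raw_spin_lock and friends directly. Below is a minimal,
self-contained C sketch of the resulting pattern; the type and function names are
illustrative only, not the actual Xen declarations, and the lock word starts at 1
(unlocked) so that the locked decrement leaves 0 on acquisition.

    /* Illustrative sketch of the inline fast/slow path this patch introduces. */
    typedef struct { volatile short lock; } demo_spinlock_t;   /* hypothetical type */
    #define DEMO_SPIN_UNLOCKED { 1 }    /* 1 = free; "lock; decw" takes it to 0 */

    static inline void demo_spin_lock(demo_spinlock_t *l)      /* hypothetical helper */
    {
        asm volatile (
            "1:  lock; decw %0         \n"  /* try to grab the lock (word-sized field) */
            "    jns 3f                \n"  /* result >= 0: acquired, skip the slow path */
            "2:  rep; nop              \n"  /* PAUSE while another CPU holds the lock */
            "    cmpw $0,%0            \n"
            "    jle 2b                \n"  /* still held: keep spinning locally */
            "    jmp 1b                \n"  /* looks free: retry the locked decrement */
            "3:"
            : "=m" (l->lock) : : "memory" );
    }

    static inline void demo_spin_unlock(demo_spinlock_t *l)    /* hypothetical helper */
    {
        asm volatile ( "movw $1,%0" : "=m" (l->lock) : : "memory" );
    }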

_______________________________________________
Xen-changelog mailing list
Xen-changelog@xxxxxxxxxxxxxxxxxxx
http://lists.xensource.com/xen-changelog
