[Xen-changelog] [xen-unstable] x86: Clean up and simplify rwlock implementation.

To: xen-changelog@xxxxxxxxxxxxxxxxxxx
Subject: [Xen-changelog] [xen-unstable] x86: Clean up and simplify rwlock implementation.
From: Xen patchbot-unstable <patchbot-unstable@xxxxxxxxxxxxxxxxxxx>
Date: Tue, 16 Dec 2008 22:31:04 -0800
Delivery-date: Tue, 16 Dec 2008 22:32:42 -0800
Envelope-to: www-data@xxxxxxxxxxxxxxxxxxx
List-help: <mailto:xen-changelog-request@lists.xensource.com?subject=help>
List-id: BK change log <xen-changelog.lists.xensource.com>
List-post: <mailto:xen-changelog@lists.xensource.com>
List-subscribe: <http://lists.xensource.com/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=subscribe>
List-unsubscribe: <http://lists.xensource.com/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=unsubscribe>
Reply-to: xen-devel@xxxxxxxxxxxxxxxxxxx
Sender: xen-changelog-bounces@xxxxxxxxxxxxxxxxxxx
# HG changeset patch
# User Keir Fraser <keir.fraser@xxxxxxxxxx>
# Date 1229183776 0
# Node ID 6a3c2b4459ade54bb860d36b7566b182f4ec1cf9
# Parent  e767f80d4bcc90d761df14ba89c2d72d44c99d8b
x86: Clean up and simplify rwlock implementation.
Signed-off-by: Keir Fraser <keir.fraser@xxxxxxxxxx>
---
 xen/arch/x86/rwlock.c          |   28 ----------------
 xen/include/asm-x86/rwlock.h   |   71 -----------------------------------------
 xen/arch/x86/Makefile          |    1 
 xen/include/asm-x86/spinlock.h |   49 +++++++++++++++++++---------
 4 files changed, 34 insertions(+), 115 deletions(-)

diff -r e767f80d4bcc -r 6a3c2b4459ad xen/arch/x86/Makefile
--- a/xen/arch/x86/Makefile     Sat Dec 13 15:28:10 2008 +0000
+++ b/xen/arch/x86/Makefile     Sat Dec 13 15:56:16 2008 +0000
@@ -37,7 +37,6 @@ obj-y += numa.o
 obj-y += numa.o
 obj-y += pci.o
 obj-y += physdev.o
-obj-y += rwlock.o
 obj-y += setup.o
 obj-y += shutdown.o
 obj-y += smp.o
diff -r e767f80d4bcc -r 6a3c2b4459ad xen/arch/x86/rwlock.c
--- a/xen/arch/x86/rwlock.c     Sat Dec 13 15:28:10 2008 +0000
+++ /dev/null   Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-#include <asm/atomic.h>
-#include <asm/rwlock.h>
-
-#if defined(CONFIG_SMP)
-asm(
-".align  4\n"
-".globl  __write_lock_failed\n"
-"__write_lock_failed:\n"
-"        " LOCK "addl    $" RW_LOCK_BIAS_STR ",(%"__OP"ax)\n"
-"1:      rep; nop\n"
-"        cmpl    $" RW_LOCK_BIAS_STR ",(%"__OP"ax)\n"
-"        jne     1b\n"
-"        " LOCK "subl    $" RW_LOCK_BIAS_STR ",(%"__OP"ax)\n"
-"        jnz     __write_lock_failed\n"
-"        ret\n"
-
-".align  4\n"
-".globl  __read_lock_failed\n"
-"__read_lock_failed:\n"
-"        lock ; incl     (%"__OP"ax)\n"
-"1:      rep; nop\n"
-"        cmpl    $1,(%"__OP"ax)\n"
-"        js      1b\n"
-"        lock ; decl     (%"__OP"ax)\n"
-"        js      __read_lock_failed\n"
-"        ret\n"
-);
-#endif
diff -r e767f80d4bcc -r 6a3c2b4459ad xen/include/asm-x86/rwlock.h
--- a/xen/include/asm-x86/rwlock.h      Sat Dec 13 15:28:10 2008 +0000
+++ /dev/null   Thu Jan 01 00:00:00 1970 +0000
@@ -1,71 +0,0 @@
-/* include/asm-x86/rwlock.h
- *
- *     Helpers used by both rw spinlocks and rw semaphores.
- *
- *     Based in part on code from semaphore.h and
- *     spinlock.h Copyright 1996 Linus Torvalds.
- *
- *     Copyright 1999 Red Hat, Inc.
- *
- *     Written by Benjamin LaHaise.
- *
- *     This program is free software; you can redistribute it and/or
- *     modify it under the terms of the GNU General Public License
- *     as published by the Free Software Foundation; either version
- *     2 of the License, or (at your option) any later version.
- */
-#ifndef _ASM_X86_RWLOCK_H
-#define _ASM_X86_RWLOCK_H
-
-#define RW_LOCK_BIAS            0x01000000
-#define RW_LOCK_BIAS_STR       "0x01000000"
-
-#define __build_read_lock_ptr(rw, helper)   \
-       asm volatile(LOCK "subl $1,(%0)\n\t" \
-                    "jns 1f\n\t" \
-                    "call " helper "\n\t" \
-                    "1:\n" \
-                    ::"a" (rw) : "memory")
-
-#define __build_read_lock_const(rw, helper)   \
-       asm volatile(LOCK "subl $1,%0\n\t" \
-                    "jns 1f\n\t" \
-                    "push %%"__OP"ax\n\t" \
-                    "lea %0,%%"__OP"ax\n\t" \
-                    "call " helper "\n\t" \
-                    "pop %%"__OP"ax\n\t" \
-                    "1:\n" \
-                    :"=m" (*(volatile int *)rw) : : "memory")
-
-#define __build_read_lock(rw, helper)  do { \
-                                               if (__builtin_constant_p(rw)) \
-                                                       __build_read_lock_const(rw, helper); \
-                                               else \
-                                                       __build_read_lock_ptr(rw, helper); \
-                                       } while (0)
-
-#define __build_write_lock_ptr(rw, helper) \
-       asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jz 1f\n\t" \
-                    "call " helper "\n\t" \
-                    "1:\n" \
-                    ::"a" (rw) : "memory")
-
-#define __build_write_lock_const(rw, helper) \
-       asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jz 1f\n\t" \
-                    "push %%"__OP"ax\n\t" \
-                    "lea %0,%%"__OP"ax\n\t" \
-                    "call " helper "\n\t" \
-                    "pop %%"__OP"ax\n\t" \
-                    "1:\n" \
-                    :"=m" (*(volatile int *)rw) : : "memory")
-
-#define __build_write_lock(rw, helper) do { \
-                                               if (__builtin_constant_p(rw)) \
-                                                       __build_write_lock_const(rw, helper); \
-                                               else \
-                                                       __build_write_lock_ptr(rw, helper); \
-                                       } while (0)
-
-#endif
diff -r e767f80d4bcc -r 6a3c2b4459ad xen/include/asm-x86/spinlock.h
--- a/xen/include/asm-x86/spinlock.h    Sat Dec 13 15:28:10 2008 +0000
+++ b/xen/include/asm-x86/spinlock.h    Sat Dec 13 15:56:16 2008 +0000
@@ -4,7 +4,6 @@
 #include <xen/config.h>
 #include <xen/lib.h>
 #include <asm/atomic.h>
-#include <asm/rwlock.h>
 
 typedef struct {
     volatile s16 lock;
@@ -49,30 +48,50 @@ typedef struct {
     volatile unsigned int lock;
 } raw_rwlock_t;
 
+#define RW_LOCK_BIAS            0x01000000
 #define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { RW_LOCK_BIAS }
 
-/*
- * On x86, we implement read-write locks as a 32-bit counter
- * with the high bit (sign) being the "contended" bit.
- */
 static always_inline void _raw_read_lock(raw_rwlock_t *rw)
 {
-    __build_read_lock(rw, "__read_lock_failed");
+    asm volatile (
+        "1:  lock; decl %0         \n"
+        "    jns 3f                \n"
+        "    lock; incl %0         \n"
+        "2:  rep; nop              \n"
+        "    cmpl $1,%0            \n"
+        "    js 2b                 \n"
+        "    jmp 1b                \n"
+        "3:"
+        : "=m" (rw->lock) : : "memory" );
 }
 
 static always_inline void _raw_write_lock(raw_rwlock_t *rw)
 {
-    __build_write_lock(rw, "__write_lock_failed");
+    asm volatile (
+        "1:  lock; subl %1,%0      \n"
+        "    jz 3f                 \n"
+        "    lock; addl %1,%0      \n"
+        "2:  rep; nop              \n"
+        "    cmpl %1,%0            \n"
+        "    jne 2b                \n"
+        "    jmp 1b                \n"
+        "3:"
+        : "=m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory" );
 }
 
-#define _raw_read_unlock(rw)                    \
-    asm volatile (                              \
-        "lock ; incl %0" :                      \
-        "=m" ((rw)->lock) : : "memory" )
-#define _raw_write_unlock(rw)                           \
-    asm volatile (                                      \
-        "lock ; addl $" RW_LOCK_BIAS_STR ",%0" :        \
-        "=m" ((rw)->lock) : : "memory" )
+static always_inline void _raw_read_unlock(raw_rwlock_t *rw)
+{
+    asm volatile (
+        "lock ; incl %0"
+        : "=m" ((rw)->lock) : : "memory" );
+}
+
+static always_inline void _raw_write_unlock(raw_rwlock_t *rw)
+{
+    asm volatile (
+        "lock ; addl %1,%0"
+        : "=m" ((rw)->lock) : "i" (RW_LOCK_BIAS) : "memory" );
+}
 
 #define _raw_rw_is_locked(x) ((x)->lock < RW_LOCK_BIAS)
 

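A note on the protocol being inlined above: the lock word starts at RW_LOCK_BIAS (0x01000000). A reader atomically subtracts 1 and owns the lock if the result is still non-negative; a writer atomically subtracts the whole bias and owns the lock only if the result is exactly zero; on failure each undoes its change, spins until the word looks free again, then retries. The sketch below restates the same protocol with C11 atomics purely for illustration; it is not part of the patch, and the demo_* names are invented for the example.

/*
 * Illustrative sketch only, not part of the patch: the bias-counter
 * rwlock protocol from the inline asm above, expressed with C11 atomics.
 * Usage: demo_rwlock_t l = { .lock = DEMO_RW_LOCK_BIAS };
 */
#include <stdatomic.h>

#define DEMO_RW_LOCK_BIAS 0x01000000

typedef struct {
    atomic_int lock;        /* starts at DEMO_RW_LOCK_BIAS */
} demo_rwlock_t;

static void demo_read_lock(demo_rwlock_t *rw)
{
    for ( ; ; )
    {
        /* Take a reader slot; old value > 0 means no writer held the lock. */
        if ( atomic_fetch_sub(&rw->lock, 1) > 0 )
            return;
        /* A writer owns the lock: undo our decrement and spin. */
        atomic_fetch_add(&rw->lock, 1);
        while ( atomic_load(&rw->lock) < 1 )
            ;   /* corresponds to "cmpl $1 ; js 2b" in the asm above */
    }
}

static void demo_write_lock(demo_rwlock_t *rw)
{
    for ( ; ; )
    {
        /* Claim the whole bias; old == BIAS means no readers and no writer. */
        if ( atomic_fetch_sub(&rw->lock, DEMO_RW_LOCK_BIAS) == DEMO_RW_LOCK_BIAS )
            return;
        /* Contended: give the bias back and wait for the word to drain. */
        atomic_fetch_add(&rw->lock, DEMO_RW_LOCK_BIAS);
        while ( atomic_load(&rw->lock) != DEMO_RW_LOCK_BIAS )
            ;
    }
}

static void demo_read_unlock(demo_rwlock_t *rw)
{
    atomic_fetch_add(&rw->lock, 1);
}

static void demo_write_unlock(demo_rwlock_t *rw)
{
    atomic_fetch_add(&rw->lock, DEMO_RW_LOCK_BIAS);
}

The bias is chosen large enough that the reader count alone can never drive the word to zero or below, so a non-positive word always indicates a writer.
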
_______________________________________________
Xen-changelog mailing list
Xen-changelog@xxxxxxxxxxxxxxxxxxx
http://lists.xensource.com/xen-changelog
