--- linux-2.6.12-xen0-arch.orig/include/asm-i386/spinlock.h
+++ linux-2.6.12-xen0-arch/include/asm-i386/spinlock.h
@@ -7,6 +7,7 @@
#include <linux/config.h>
#include <linux/compiler.h>
#include <asm/smp_alt.h>
+#include <mach_spinlock.h>

asmlinkage int printk(const char * fmt, ...)
__attribute__ ((format (printf, 1, 2)));
@@ -121,40 +122,6 @@ static inline void _raw_spin_unlock(spin
#endif
-static inline int _raw_spin_trylock(spinlock_t *lock)
-{
- char oldval;
-#ifdef CONFIG_SMP_ALTERNATIVES
- __asm__ __volatile__(
- "1:movb %1,%b0\n"
- "movb $0,%1\n"
- "2:"
- ".section __smp_alternatives,\"a\"\n"
- ".long 1b\n"
- ".long 3f\n"
- ".previous\n"
- ".section __smp_replacements,\"a\"\n"
- "3: .byte 2b - 1b\n"
- ".byte 5f-4f\n"
- ".byte 0\n"
- ".byte 6f-5f\n"
- ".byte -1\n"
- "4: xchgb %b0,%1\n"
- "5: movb %1,%b0\n"
- "movb $0,%1\n"
- "6:\n"
- ".previous\n"
- :"=q" (oldval), "=m" (lock->slock)
- :"0" (0) : "memory");
-#else
- __asm__ __volatile__(
- "xchgb %b0,%1\n"
- :"=q" (oldval), "=m" (lock->slock)
- :"0" (0) : "memory");
-#endif
- return oldval > 0;
-}
-
static inline void _raw_spin_lock(spinlock_t *lock)
{
#ifdef CONFIG_DEBUG_SPINLOCK
@@ -252,9 +219,6 @@ static inline void _raw_write_lock(rwloc
__build_write_lock(rw, "__write_lock_failed");
}
-#define _raw_read_unlock(rw) asm volatile(LOCK "incl %0" :"=m" ((rw)->lock) : : "memory")
-#define _raw_write_unlock(rw) asm volatile(LOCK "addl $" RW_LOCK_BIAS_STR ",%0":"=m" ((rw)->lock) : : "memory")
-
static inline int _raw_read_trylock(rwlock_t *lock)
{
atomic_t *count = (atomic_t *)lock;
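[Reviewer note, not part of the patch: the block removed above is _raw_spin_trylock together with the rwlock unlock macros; both reappear verbatim in the mach-default header added below. Semantically the trylock is a single atomic exchange: store 0 ("held") into lock->slock, which is 1 when free, so the fetched old value is positive exactly when we won the lock. The CONFIG_SMP_ALTERNATIVES variant emits the same exchange in a __smp_replacements record so that a uniprocessor boot can be patched down to a cheap non-atomic movb pair. A minimal stand-alone C sketch of the logic, using GCC __sync builtins instead of the kernel's inline asm; the sketch_* names are illustrative only:

	/* Illustrative model only; the real spinlock_t lives in
	 * asm-i386/spinlock.h and slock is initialised to 1. */
	typedef struct { volatile char slock; } sketch_spinlock_t;

	static inline int sketch_trylock(sketch_spinlock_t *lock)
	{
		/* "xchgb %b0,%1": atomically store 0 ("held") and
		 * fetch the previous value in a single step. */
		char oldval = __sync_lock_test_and_set(&lock->slock, 0);

		/* Old value 1 (> 0): the lock was free and is now ours.
		 * 0 or negative: someone else already holds it. */
		return oldval > 0;
	}
]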
--- /dev/null
+++ linux-2.6.12-xen0-arch/include/asm-i386/mach-default/mach_spinlock.h
@@ -0,0 +1,70 @@
+#ifndef __ASM_MACH_SPINLOCK_H
+#define __ASM_MACH_SPINLOCK_H
+
+#define spin_lock_string \
+ "1:\n" \
+ LOCK \
+ "decb %0\n\t" \
+ "jns 3f\n" \
+ "2:\t" \
+ "rep;nop\n\t" \
+ "cmpb $0,%0\n\t" \
+ "jle 2b\n\t" \
+ "jmp 1b\n" \
+ "3:\n\t"
+
+#define spin_lock_string_flags \
+ "1:\n" \
+ LOCK \
+ "decb %0\n\t" \
+ "jns 4f\n\t" \
+ "2:\t" \
+ "testl $0x200, %1\n\t" \
+ "jz 3f\n\t" \
+ "sti\n\t" \
+ "3:\t" \
+ "rep;nop\n\t" \
+ "cmpb $0, %0\n\t" \
+ "jle 3b\n\t" \
+ "cli\n\t" \
+ "jmp 1b\n" \
+ "4:\n\t"
+
+static inline int _raw_spin_trylock(spinlock_t *lock)
+{
+ char oldval;
+#ifdef CONFIG_SMP_ALTERNATIVES
+ __asm__ __volatile__(
+ "1:movb %1,%b0\n"
+ "movb $0,%1\n"
+ "2:"
+ ".section __smp_alternatives,\"a\"\n"
+ ".long 1b\n"
+ ".long 3f\n"
+ ".previous\n"
+ ".section __smp_replacements,\"a\"\n"
+ "3: .byte 2b - 1b\n"
+ ".byte 5f-4f\n"
+ ".byte 0\n"
+ ".byte 6f-5f\n"
+ ".byte -1\n"
+ "4: xchgb %b0,%1\n"
+ "5: movb %1,%b0\n"
+ "movb $0,%1\n"
+ "6:\n"
+ ".previous\n"
+ :"=q" (oldval), "=m" (lock->slock)
+ :"0" (0) : "memory");
+#else
+ __asm__ __volatile__(
+ "xchgb %b0,%1\n"
+ :"=q" (oldval), "=m" (lock->slock)
+ :"0" (0) : "memory");
+#endif
+ return oldval > 0;
+}
+
+#define _raw_read_unlock(rw) asm volatile(LOCK "incl %0" :"=m" ((rw)->lock) : : "memory")
+#define _raw_write_unlock(rw) asm volatile(LOCK "addl $" RW_LOCK_BIAS_STR ",%0":"=m" ((rw)->lock) : : "memory")
+
+#endif /* __ASM_MACH_SPINLOCK_H */
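[Reviewer note, not part of the patch: spin_lock_string above is the classic i386 byte spinlock. "lock ; decb" atomically decrements slock, and jns acquires when the result is non-negative (slock was 1); on contention it spins with rep;nop (the PAUSE hint) doing plain reads until the holder's unlock stores 1 back, then retries the locked decrement. spin_lock_string_flags additionally tests bit 9 (0x200, EFLAGS.IF) of the caller's saved flags word and, if interrupts were enabled on entry, re-enables them with sti while spinning and disables them with cli before retrying, so a long spin does not keep interrupts off. A rough C model of the plain loop, reusing the sketch_spinlock_t assumption from the note above:

	static inline void sketch_spin_lock(sketch_spinlock_t *lock)
	{
		for (;;) {
			/* "lock ; decb %0" / "jns": atomic decrement,
			 * acquired when the new value is non-negative. */
			if (__sync_sub_and_fetch(&lock->slock, 1) >= 0)
				return;
			/* "rep;nop" spin on plain reads (no bus-locked
			 * traffic) until unlock stores 1 again. */
			while (lock->slock <= 0)
				__builtin_ia32_pause();
		}
	}
]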
--- linux-2.6.12-xen0/include/asm-i386/mach-xen/mach_spinlock.h	1969-12-31 16:00:00.000000000 -0800
+++ linux-2.6.12-xen0-arch/include/asm-i386/mach-xen/mach_spinlock.h	2005-08-02 00:39:54.000000000 -0700
@@ -0,0 +1,47 @@
+#ifndef __ASM_MACH_SPINLOCK_H
+#define __ASM_MACH_SPINLOCK_H
+
+#define spin_lock_string \
+ "\n1:\t" \
+ "lock ; decb %0\n\t" \
+ "jns 3f\n" \
+ "2:\t" \
+ "rep;nop\n\t" \
+ "cmpb $0,%0\n\t" \
+ "jle 2b\n\t" \
+ "jmp 1b\n" \
+ "3:\n\t"
+
+#define spin_lock_string_flags \
+ "\n1:\t" \
+ "lock ; decb %0\n\t" \
+ "jns 4f\n\t" \
+ "2:\t" \
+ "testl $0x200, %1\n\t" \
+ "jz 3f\n\t" \
+ "#sti\n\t" \
+ "3:\t" \
+ "rep;nop\n\t" \
+ "cmpb $0, %0\n\t" \
+ "jle 3b\n\t" \
+ "#cli\n\t" \
+ "jmp 1b\n" \
+ "4:\n\t"
+
+/* FIXME - already controlled by CONFIG_SMP_ALTERNATIVES */
+static inline int _raw_spin_trylock(spinlock_t *lock)
+{
+ char oldval;
+ __asm__ __volatile__(
+ "xchgb %b0,%1"
+ :"=q" (oldval), "=m" (lock->slock)
+ :"0" (0) : "memory");
+ return oldval > 0;
+}
+
+
+/* FIXME - why not just use LOCK */
+#define _raw_read_unlock(rw) asm volatile("lock ; incl %0" :"=m" ((rw)->lock) : : "memory")
+#define _raw_write_unlock(rw) asm volatile("lock ; addl $" RW_LOCK_BIAS_STR ",%0":"=m" ((rw)->lock) : : "memory")
+
+#endif /* __ASM_MACH_SPINLOCK_H */
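[Reviewer note, not part of the patch: the Xen variant differs from mach-default in two ways, both flagged by FIXMEs. First, it hardcodes "lock ;" where mach-default uses the LOCK macro from asm/smp_alt.h, so these unlocks can never be patched down on a uniprocessor boot; resolving that FIXME would presumably make the definitions identical to the mach-default ones:

	#define _raw_read_unlock(rw) asm volatile(LOCK "incl %0" :"=m" ((rw)->lock) : : "memory")
	#define _raw_write_unlock(rw) asm volatile(LOCK "addl $" RW_LOCK_BIAS_STR ",%0":"=m" ((rw)->lock) : : "memory")

Second, the sti/cli in spin_lock_string_flags are commented out with the "#" assembler comment character: a paravirtualized Xen guest must not toggle the real interrupt flag and would instead mask/unmask event delivery through the hypervisor's shared-info page, so until that is wired up the _flags variant degenerates to the plain spin.]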