/* cleanup */
/* [linux-2.4.git] / include / asm-s390 / spinlock.h */
/*
 *  include/asm-s390/spinlock.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/spinlock.h"
 */

#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

14 /*
15  * Simple spin lock operations.  There are two variants, one clears IRQ's
16  * on the local processor, one does not.
17  *
18  * We make no fairness assumptions. They have a cost.
19  */
20
21 typedef struct {
22         volatile unsigned long lock;
23 } spinlock_t;
24
25 #define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
26 #define spin_lock_init(lp) do { (lp)->lock = 0; } while(0)
27 #define spin_unlock_wait(lp)    do { barrier(); } while(((volatile spinlock_t *)(lp))->lock)
28 #define spin_is_locked(x) ((x)->lock != 0)
29
30 extern inline void spin_lock(spinlock_t *lp)
31 {
32         unsigned int reg1, reg2;
33         __asm__ __volatile("    bras  %0,1f\n"
34                            "0:  diag  0,0,68\n"
35                            "1:  slr   %1,%1\n"
36                            "    cs    %1,%0,0(%2)\n"
37                            "    jl    0b\n"
38                            : "=&d" (reg1), "=&d" (reg2)
39                            : "a" (&lp->lock) : "cc", "memory" );
40 }
41
42 extern inline int spin_trylock(spinlock_t *lp)
43 {
44         unsigned long result, reg;
45         __asm__ __volatile("    slr   %0,%0\n"
46                            "    basr  %1,0\n"
47                            "0:  cs    %0,%1,0(%2)"
48                            : "=&d" (result), "=&d" (reg)
49                            : "a" (&lp->lock) : "cc", "memory" );
50         return !result;
51 }
52
53 extern inline void spin_unlock(spinlock_t *lp)
54 {
55         __asm__ __volatile("    xc 0(4,%0),0(%0)\n"
56                            "    bcr 15,0"
57                            : : "a" (&lp->lock) : "memory", "cc" );
58 }
59                 
60 /*
61  * Read-write spinlocks, allowing multiple readers
62  * but only one writer.
63  *
64  * NOTE! it is quite common to have readers in interrupts
65  * but no interrupt writers. For those circumstances we
66  * can "mix" irq-safe locks - any writer needs to get a
67  * irq-safe write-lock, but readers can get non-irqsafe
68  * read-locks.
69  */
70 typedef struct {
71         volatile unsigned long lock;
72         volatile unsigned long owner_pc;
73 } rwlock_t;
74
75 #define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }
76
77 #define rwlock_init(x)  do { *(x) = RW_LOCK_UNLOCKED; } while(0)
78
79 #define read_lock(rw)   \
80         asm volatile("   l     2,0(%0)\n"   \
81                      "   j     1f\n"     \
82                      "0: diag  0,0,68\n" \
83                      "1: la    2,0(2)\n"     /* clear high (=write) bit */ \
84                      "   la    3,1(2)\n"     /* one more reader */ \
85                      "   cs    2,3,0(%0)\n"  /* try to write new value */ \
86                      "   jl    0b"       \
87                      : : "a" (&(rw)->lock) : "2", "3", "cc", "memory" )
88
89 #define read_unlock(rw) \
90         asm volatile("   l     2,0(%0)\n"   \
91                      "   j     1f\n"     \
92                      "0: diag  0,0,68\n" \
93                      "1: lr    3,2\n"    \
94                      "   ahi   3,-1\n"    /* one less reader */ \
95                      "   cs    2,3,0(%0)\n" \
96                      "   jl    0b"       \
97                      : : "a" (&(rw)->lock) : "2", "3", "cc", "memory" )
98
99 #define write_lock(rw) \
100         asm volatile("   lhi   3,1\n"    \
101                      "   sll   3,31\n"    /* new lock value = 0x80000000 */ \
102                      "   j     1f\n"     \
103                      "0: diag  0,0,68\n" \
104                      "1: slr   2,2\n"     /* old lock value must be 0 */ \
105                      "   cs    2,3,0(%0)\n" \
106                      "   jl    0b"       \
107                      : : "a" (&(rw)->lock) : "2", "3", "cc", "memory" )
108
109 #define write_unlock(rw) \
110         asm volatile("   slr   3,3\n"     /* new lock value = 0 */ \
111                      "   j     1f\n"     \
112                      "0: diag  0,0,68\n" \
113                      "1: lhi   2,1\n"    \
114                      "   sll   2,31\n"    /* old lock value must be 0x80000000 */ \
115                      "   cs    2,3,0(%0)\n" \
116                      "   jl    0b"       \
117                      : : "a" (&(rw)->lock) : "2", "3", "cc", "memory" )

#endif /* __ASM_SPINLOCK_H */