/*
 *  include/asm-s390x/spinlock.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/spinlock.h"
 */

#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

/*
 * Grmph, take care of %&#! user space programs that include
 * asm/spinlock.h. The diagnose is only available in kernel
 * context.
 */
#ifdef __KERNEL__
#include <asm/lowcore.h>
#define __DIAG44_INSN "ex"
#define __DIAG44_OPERAND __LC_DIAG44_OPCODE
#else
#define __DIAG44_INSN "#"
#define __DIAG44_OPERAND 0
#endif
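
/*
 * In kernel context __DIAG44_INSN expands to "ex", which executes the
 * opcode stored at __LC_DIAG44_OPCODE in the lowcore (a diagnose 0x44,
 * the "give up time slice" hint to the hypervisor) whenever a lock is
 * contended.  In user space it expands to "#", turning that line into
 * an assembler comment, so the loops below become plain busy waits.
 */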

/*
 * Simple spin lock operations.  There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions. They have a cost.
 */

typedef struct {
        volatile unsigned int lock;
} __attribute__ ((aligned (4))) spinlock_t;

#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
#define spin_lock_init(lp) do { (lp)->lock = 0; } while(0)
#define spin_unlock_wait(lp)    do { barrier(); } while(((volatile spinlock_t *)(lp))->lock)
#define spin_is_locked(x) ((x)->lock != 0)

extern inline void spin_lock(spinlock_t *lp)
{
        unsigned long reg1, reg2;
        __asm__ __volatile("    bras  %1,1f\n"       /* %1 = address of 0:, a nonzero lock value */
                           "0:  " __DIAG44_INSN " 0,%3\n" /* contended: give up the time slice */
                           "1:  slr   %0,%0\n"       /* %0 = 0, the expected unlocked value */
                           "    cs    %0,%1,0(%2)\n" /* if lock == 0, atomically store %1 */
                           "    jl    0b\n"          /* compare failed: lock is held, retry */
                           : "=&d" (reg1), "=&d" (reg2)
                           : "a" (&lp->lock), "i" (__DIAG44_OPERAND)
                           : "cc", "memory" );
}
56
57 extern inline int spin_trylock(spinlock_t *lp)
58 {
59         unsigned int result, reg;
60         __asm__ __volatile("    slr   %0,%0\n"
61                            "    basr  %1,0\n"
62                            "0:  cs    %0,%1,0(%2)"
63                            : "=&d" (result), "=&d" (reg)
64                            : "a" (&lp->lock) : "cc", "memory" );
65         return !result;
66 }
67
68 extern inline void spin_unlock(spinlock_t *lp)
69 {
70         __asm__ __volatile("    xc 0(4,%0),0(%0)\n"
71                            "    bcr 15,0"
72                            : : "a" (&lp->lock) : "memory", "cc" );
73 }
74                 
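/*
 * Usage sketch (illustrative only, guarded by #if 0 so it is never
 * compiled): a minimal, hypothetical example of the primitives above.
 * The names "hits_lock", "hits", count_hit() and count_hit_if_free()
 * are made up for illustration.
 */
#if 0
static spinlock_t hits_lock = SPIN_LOCK_UNLOCKED;
static unsigned long hits;

static void count_hit(void)
{
        spin_lock(&hits_lock);          /* spin (yielding via diag 0x44) until acquired */
        hits++;
        spin_unlock(&hits_lock);
}

static int count_hit_if_free(void)
{
        if (!spin_trylock(&hits_lock))  /* one cs attempt, no spinning */
                return 0;
        hits++;
        spin_unlock(&hits_lock);
        return 1;
}
#endif
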
/*
 * Read-write spinlocks, allowing multiple readers
 * but only one writer.
 *
 * Bit 0 of "lock" (the 0x8000000000000000 bit) marks a writer;
 * the remaining bits count the active readers.
 *
 * NOTE! it is quite common to have readers in interrupts
 * but no interrupt writers. For those circumstances we
 * can "mix" irq-safe locks - any writer needs to get an
 * irq-safe write-lock, but readers can get non-irqsafe
 * read-locks.
 */
typedef struct {
        volatile unsigned long lock;
        volatile unsigned long owner_pc;
} rwlock_t;

#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }

#define rwlock_init(x)  do { *(x) = RW_LOCK_UNLOCKED; } while(0)

#define read_lock(rw)   \
        asm volatile("   lg    2,0(%0)\n"   \
                     "   j     1f\n"     \
                     "0: " __DIAG44_INSN " 0,%1\n" \
                     "1: nihh  2,0x7fff\n" /* clear high (=write) bit */ \
                     "   la    3,1(2)\n"   /* one more reader */  \
                     "   csg   2,3,0(%0)\n" /* try to write new value */ \
                     "   jl    0b"       \
                     : : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND) \
                     : "2", "3", "cc", "memory" )

#define read_unlock(rw) \
        asm volatile("   lg    2,0(%0)\n"   \
                     "   j     1f\n"     \
                     "0: " __DIAG44_INSN " 0,%1\n" \
                     "1: lgr   3,2\n"    \
                     "   bctgr 3,0\n"    /* one less reader */ \
                     "   csg   2,3,0(%0)\n" \
                     "   jl    0b"       \
                     : : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND) \
                     : "2", "3", "cc", "memory" )

#define write_lock(rw) \
        asm volatile("   llihh 3,0x8000\n" /* new lock value = 0x80...0 */ \
                     "   j     1f\n"       \
                     "0: " __DIAG44_INSN " 0,%1\n"   \
                     "1: slgr  2,2\n"      /* old lock value must be 0 */ \
                     "   csg   2,3,0(%0)\n" \
                     "   jl    0b"         \
                     : : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND) \
                     : "2", "3", "cc", "memory" )

#define write_unlock(rw) \
        asm volatile("   slgr  3,3\n"      /* new lock value = 0 */ \
                     "   j     1f\n"       \
                     "0: " __DIAG44_INSN " 0,%1\n"   \
                     "1: llihh 2,0x8000\n" /* old lock value must be 0x8..0 */\
                     "   csg   2,3,0(%0)\n"   \
                     "   jl    0b"         \
                     : : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND) \
                     : "2", "3", "cc", "memory" )

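/*
 * Usage sketch (illustrative only, guarded by #if 0 so it is never
 * compiled): a hypothetical reader/writer pattern for the macros above.
 * The names "table_lock", lookup(), update(), do_lookup() and
 * do_update() are made up for illustration.
 */
#if 0
static rwlock_t table_lock = RW_LOCK_UNLOCKED;

static int lookup(int key)
{
        int value;

        read_lock(&table_lock);         /* any number of readers may enter */
        value = do_lookup(key);         /* hypothetical helper */
        read_unlock(&table_lock);
        return value;
}

static void update(int key, int value)
{
        write_lock(&table_lock);        /* waits until no readers and no writer */
        do_update(key, value);          /* hypothetical helper */
        write_unlock(&table_lock);
}
#endif
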
#endif /* __ASM_SPINLOCK_H */