1 #ifndef __ARCH_S390_ATOMIC__
2 #define __ARCH_S390_ATOMIC__
/*
 *  include/asm-s390/atomic.h
 *
 *  Copyright (C) 1999,2000 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *  Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *  Copyright (C) 1992, Linus Torvalds
 */
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 * S390 uses 'Compare And Swap' for atomicity in SMP environment.
 */
/*
 * 32-bit counter, forced to 4-byte alignment so the S390 Compare-And-Swap
 * instruction (which requires word alignment) can operate on it directly.
 */
typedef struct { volatile int counter; } __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)  { (i) }
26 #define atomic_eieio() __asm__ __volatile__ ("BCR 15,0")
/*
 * __CS_LOOP: atomic read-modify-write on the int at *ptr.
 *   old_val <- *ptr; new_val <- old_val <op_string> op_val
 * The Compare-And-Swap (cs) only stores new_val if *ptr still equals
 * old_val; on mismatch (condition code 1) the "jl 0b" retries with the
 * refreshed old_val.  On exit old_val holds the previous counter value and
 * new_val the updated one.
 * NOTE(review): the lr/cs/jl lines were missing from the damaged source and
 * are reconstructed from the standard S390 CS loop — verify against the
 * original tree.
 */
#define __CS_LOOP(old_val, new_val, ptr, op_val, op_string)		\
	__asm__ __volatile__("   l     %0,0(%2)\n"			\
			     "0: lr    %1,%0\n"				\
			     op_string "    %1,%3\n"			\
			     "   cs    %0,%1,0(%2)\n"			\
			     "   jl    0b"				\
			     : "=&d" (old_val), "=&d" (new_val)		\
			     : "a" (ptr), "d" (op_val) : "cc" );
/* Plain (non-atomic) read and store of the counter value. */
#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))
40 static __inline__ void atomic_add(int i, atomic_t *v)
43 __CS_LOOP(old_val, new_val, v, i, "ar");
46 static __inline__ int atomic_add_return (int i, atomic_t *v)
49 __CS_LOOP(old_val, new_val, v, i, "ar");
53 static __inline__ int atomic_add_negative(int i, atomic_t *v)
56 __CS_LOOP(old_val, new_val, v, i, "ar");
60 static __inline__ void atomic_sub(int i, atomic_t *v)
63 __CS_LOOP(old_val, new_val, v, i, "sr");
66 static __inline__ void atomic_inc(volatile atomic_t *v)
69 __CS_LOOP(old_val, new_val, v, 1, "ar");
72 static __inline__ int atomic_inc_return(volatile atomic_t *v)
75 __CS_LOOP(old_val, new_val, v, 1, "ar");
79 static __inline__ int atomic_inc_and_test(volatile atomic_t *v)
82 __CS_LOOP(old_val, new_val, v, 1, "ar");
86 static __inline__ void atomic_dec(volatile atomic_t *v)
89 __CS_LOOP(old_val, new_val, v, 1, "sr");
92 static __inline__ int atomic_dec_return(volatile atomic_t *v)
95 __CS_LOOP(old_val, new_val, v, 1, "sr");
99 static __inline__ int atomic_dec_and_test(volatile atomic_t *v)
101 int old_val, new_val;
102 __CS_LOOP(old_val, new_val, v, 1, "sr");
106 static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *v)
108 int old_val, new_val;
109 __CS_LOOP(old_val, new_val, v, ~mask, "nr");
112 static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *v)
114 int old_val, new_val;
115 __CS_LOOP(old_val, new_val, v, mask, "or");
119 returns 0 if expected_oldval==value in *v ( swap was successful )
120 returns 1 if unsuccessful.
122 static __inline__ int
123 atomic_compare_and_swap(int expected_oldval,int new_val,atomic_t *v)
127 __asm__ __volatile__(
134 : "a" (v), "d" (expected_oldval) , "d" (new_val)
140 Spin till *v = expected_oldval then swap with newval.
142 static __inline__ void
143 atomic_compare_and_swap_spin(int expected_oldval,int new_val,atomic_t *v)
145 __asm__ __volatile__(
149 : : "a" (v), "d" (expected_oldval) , "d" (new_val)
/*
 * Debug helper: try to change *where from 'from' to 'to' with compare and
 * swap; if the counter did not contain 'from', log a warning and force the
 * counter to 'to' anyway.
 * NOTE(review): expands to a bare if-statement (no do { } while (0)), so a
 * following "else" would bind to it; kept as-is for source compatibility
 * with existing callers.
 */
#define atomic_compare_and_swap_debug(where,from,to) \
if (atomic_compare_and_swap ((from), (to), (where))) {\
	printk (KERN_WARNING"%s/%d atomic counter:%s couldn't be changed from %d(%s) to %d(%s), was %d\n",\
		__FILE__,__LINE__,#where,(from),#from,(to),#to,atomic_read (where));\
	atomic_set(where,(to));\
}
160 #define smp_mb__before_atomic_dec() smp_mb()
161 #define smp_mb__after_atomic_dec() smp_mb()
162 #define smp_mb__before_atomic_inc() smp_mb()
163 #define smp_mb__after_atomic_inc() smp_mb()
165 #endif /* __ARCH_S390_ATOMIC __ */