1 #ifndef __ARCH_I386_ATOMIC__
2 #define __ARCH_I386_ATOMIC__
4 #include <linux/config.h>
/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */
/*
 * LOCK prefixes the following instruction so that its read-modify-write
 * cycle is atomic with respect to other CPUs.  On a uniprocessor build
 * the prefix is pure overhead, so it expands to nothing there.
 */
#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
/*
 * Wrapping the counter in a struct keeps atomic_t from being mixed up
 * with a plain int by the type system; volatile forces every access to
 * go to memory instead of a register-cached copy.
 */
typedef struct { volatile int counter; } atomic_t;
/* Static initializer, e.g.:  atomic_t refs = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
/* Plain load of the counter; the volatile qualifier on the field forces
 * a real memory read each time. */
#define atomic_read(v) ((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
/* Plain store of @i into the counter. */
#define atomic_set(v,i) (((v)->counter) = (i))
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v. Note that the guaranteed useful range
 * of an atomic_t is only 24 bits.
 */
53 static __inline__ void atomic_add(int i, atomic_t *v)
58 :"ir" (i), "m" (v->counter));
/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
69 static __inline__ void atomic_sub(int i, atomic_t *v)
74 :"ir" (i), "m" (v->counter));
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
87 static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
92 LOCK "subl %2,%0; sete %1"
93 :"=m" (v->counter), "=qm" (c)
94 :"ir" (i), "m" (v->counter) : "memory");
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
105 static __inline__ void atomic_inc(atomic_t *v)
107 __asm__ __volatile__(
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
120 static __inline__ void atomic_dec(atomic_t *v)
122 __asm__ __volatile__(
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
137 static __inline__ int atomic_dec_and_test(atomic_t *v)
141 __asm__ __volatile__(
142 LOCK "decl %0; sete %1"
143 :"=m" (v->counter), "=qm" (c)
144 :"m" (v->counter) : "memory");
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
157 static __inline__ int atomic_inc_and_test(atomic_t *v)
161 __asm__ __volatile__(
162 LOCK "incl %0; sete %1"
163 :"=m" (v->counter), "=qm" (c)
164 :"m" (v->counter) : "memory");
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero. Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
178 static __inline__ int atomic_add_negative(int i, atomic_t *v)
182 __asm__ __volatile__(
183 LOCK "addl %2,%0; sets %1"
184 :"=m" (v->counter), "=qm" (c)
185 :"ir" (i), "m" (v->counter) : "memory");
189 /* These are x86-specific, used by some header files */
/*
 * atomic_clear_mask - atomically clear the bits of @mask in *addr
 * using a LOCK-prefixed andl with ~mask.  Note @addr is a plain
 * int pointer here, not an atomic_t.
 */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")
/*
 * atomic_set_mask - atomically set the bits of @mask in *addr using a
 * LOCK-prefixed orl.  As with atomic_clear_mask, @addr is a plain
 * int pointer.
 */
#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK "orl %0,%1" \
: : "r" (mask),"m" (*addr) : "memory")
/* Atomic operations are already serializing on x86 */
/*
 * The LOCK-prefixed instructions above already order memory accesses
 * at the hardware level, so these hooks only need to stop the compiler
 * from reordering around the atomic op.  NOTE(review): barrier() is
 * defined elsewhere — presumably a compiler-only barrier; confirm.
 */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()