1 #ifndef _ASM_GENERIC_ATOMIC_H
2 #define _ASM_GENERIC_ATOMIC_H
/*
 * Copyright (C) 2005 Silicon Graphics, Inc.
 * Christoph Lameter <clameter@sgi.com>
 *
 * Allows to provide arch independent atomic definitions without the need to
 * edit all arch specific atomic.h files.
 */
11 #include <asm/types.h>
12 #include <asm/system.h>
/*
 * Support for atomic_long_t
 *
 * Casts for parameters are avoided for existing atomic functions in order to
 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
 * macros of a platform may have.
 */
22 #if BITS_PER_LONG == 64
24 typedef atomic64_t atomic_long_t;
26 #define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
28 static inline long atomic_long_read(atomic_long_t *l)
30 atomic64_t *v = (atomic64_t *)l;
32 return (long)atomic64_read(v);
35 static inline void atomic_long_set(atomic_long_t *l, long i)
37 atomic64_t *v = (atomic64_t *)l;
42 static inline void atomic_long_inc(atomic_long_t *l)
44 atomic64_t *v = (atomic64_t *)l;
49 static inline void atomic_long_dec(atomic_long_t *l)
51 atomic64_t *v = (atomic64_t *)l;
56 static inline void atomic_long_add(long i, atomic_long_t *l)
58 atomic64_t *v = (atomic64_t *)l;
63 static inline void atomic_long_sub(long i, atomic_long_t *l)
65 atomic64_t *v = (atomic64_t *)l;
70 static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
72 atomic64_t *v = (atomic64_t *)l;
74 return atomic64_sub_and_test(i, v);
77 static inline int atomic_long_dec_and_test(atomic_long_t *l)
79 atomic64_t *v = (atomic64_t *)l;
81 return atomic64_dec_and_test(v);
84 static inline int atomic_long_inc_and_test(atomic_long_t *l)
86 atomic64_t *v = (atomic64_t *)l;
88 return atomic64_inc_and_test(v);
91 static inline int atomic_long_add_negative(long i, atomic_long_t *l)
93 atomic64_t *v = (atomic64_t *)l;
95 return atomic64_add_negative(i, v);
98 static inline long atomic_long_add_return(long i, atomic_long_t *l)
100 atomic64_t *v = (atomic64_t *)l;
102 return (long)atomic64_add_return(i, v);
105 static inline long atomic_long_sub_return(long i, atomic_long_t *l)
107 atomic64_t *v = (atomic64_t *)l;
109 return (long)atomic64_sub_return(i, v);
112 static inline long atomic_long_inc_return(atomic_long_t *l)
114 atomic64_t *v = (atomic64_t *)l;
116 return (long)atomic64_inc_return(v);
119 static inline long atomic_long_dec_return(atomic_long_t *l)
121 atomic64_t *v = (atomic64_t *)l;
123 return (long)atomic64_dec_return(v);
126 #define atomic_long_add_unless(l, a, u) \
127 atomic64_add_unless((atomic64_t *)(l), (a), (u))
129 #define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
131 #define atomic_long_cmpxchg(l, old, new) \
132 (atomic_cmpxchg((atomic64_t *)(l), (old), (new)))
133 #define atomic_long_xchg(v, new) \
134 (atomic_xchg((atomic64_t *)(l), (new)))
136 #else /* BITS_PER_LONG == 64 */
138 typedef atomic_t atomic_long_t;
140 #define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
141 static inline long atomic_long_read(atomic_long_t *l)
143 atomic_t *v = (atomic_t *)l;
145 return (long)atomic_read(v);
148 static inline void atomic_long_set(atomic_long_t *l, long i)
150 atomic_t *v = (atomic_t *)l;
155 static inline void atomic_long_inc(atomic_long_t *l)
157 atomic_t *v = (atomic_t *)l;
162 static inline void atomic_long_dec(atomic_long_t *l)
164 atomic_t *v = (atomic_t *)l;
169 static inline void atomic_long_add(long i, atomic_long_t *l)
171 atomic_t *v = (atomic_t *)l;
176 static inline void atomic_long_sub(long i, atomic_long_t *l)
178 atomic_t *v = (atomic_t *)l;
183 static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
185 atomic_t *v = (atomic_t *)l;
187 return atomic_sub_and_test(i, v);
190 static inline int atomic_long_dec_and_test(atomic_long_t *l)
192 atomic_t *v = (atomic_t *)l;
194 return atomic_dec_and_test(v);
197 static inline int atomic_long_inc_and_test(atomic_long_t *l)
199 atomic_t *v = (atomic_t *)l;
201 return atomic_inc_and_test(v);
204 static inline int atomic_long_add_negative(long i, atomic_long_t *l)
206 atomic_t *v = (atomic_t *)l;
208 return atomic_add_negative(i, v);
211 static inline long atomic_long_add_return(long i, atomic_long_t *l)
213 atomic_t *v = (atomic_t *)l;
215 return (long)atomic_add_return(i, v);
218 static inline long atomic_long_sub_return(long i, atomic_long_t *l)
220 atomic_t *v = (atomic_t *)l;
222 return (long)atomic_sub_return(i, v);
225 static inline long atomic_long_inc_return(atomic_long_t *l)
227 atomic_t *v = (atomic_t *)l;
229 return (long)atomic_inc_return(v);
232 static inline long atomic_long_dec_return(atomic_long_t *l)
234 atomic_t *v = (atomic_t *)l;
236 return (long)atomic_dec_return(v);
239 #define atomic_long_add_unless(l, a, u) \
240 atomic_add_unless((atomic_t *)(l), (a), (u))
242 #define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
244 #define atomic_long_cmpxchg(l, old, new) \
245 (atomic_cmpxchg((atomic_t *)(l), (old), (new)))
246 #define atomic_long_xchg(v, new) \
247 (atomic_xchg((atomic_t *)(l), (new)))
249 #endif /* BITS_PER_LONG == 64 */
251 #endif /* _ASM_GENERIC_ATOMIC_H */