/* linux-2.4.21-pre4: include/asm-mips64/system.h */
1 /*
2  * This file is subject to the terms and conditions of the GNU General Public
3  * License.  See the file "COPYING" in the main directory of this archive
4  * for more details.
5  *
6  * Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999 by Ralf Baechle
7  * Modified further for R[236]000 by Paul M. Antoine, 1996
8  * Copyright (C) 1999 Silicon Graphics
9  */
10 #ifndef _ASM_SYSTEM_H
11 #define _ASM_SYSTEM_H
12
13 #include <linux/config.h>
14 #include <asm/sgidefs.h>
15
16 #include <linux/kernel.h>
17
18 #include <asm/addrspace.h>
19 #include <asm/ptrace.h>
20
/*
 * Assembler macro "__sti": enable interrupts by setting the IE bit
 * (bit 0) of the CP0 Status register ($12).  The ori 0x1f / xori 0x1e
 * pair sets bit 0 and clears bits 1-4 (presumably EXL/ERL/KSU — see
 * the MIPS Status register layout) in scratch register $1 (AT, hence
 * the .set noat).
 */
__asm__ (
	".macro\t__sti\n\t"
	".set\tpush\n\t"
	".set\treorder\n\t"
	".set\tnoat\n\t"
	"mfc0\t$1,$12\n\t"
	"ori\t$1,0x1f\n\t"
	"xori\t$1,0x1e\n\t"
	"mtc0\t$1,$12\n\t"
	".set\tpop\n\t"
	".endm");

/* Enable interrupts on the local CPU (expands the asm macro above). */
extern __inline__ void
__sti(void)
{
	__asm__ __volatile__(
		"__sti"
		: /* no outputs */
		: /* no inputs */
		: "memory");	/* compiler barrier across the enable */
}
42
/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
/*
 * Assembler macro "__cli": disable interrupts by clearing the IE bit
 * (bit 0) of the CP0 Status register via the ori 1 / xori 1 pair in
 * scratch $1, then pad the mtc0 hazard with three "sll $0, $0, 1"
 * instructions (nop encodings — see the comment above on why three).
 */
__asm__ (
	".macro\t__cli\n\t"
	".set\tpush\n\t"
	".set\treorder\n\t"
	".set\tnoat\n\t"
	"mfc0\t$1,$12\n\t"
	"ori\t$1,1\n\t"
	"xori\t$1,1\n\t"
	".set\tnoreorder\n\t"
	"mtc0\t$1,$12\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	".set\tpop\n\t"
	".endm");

/* Disable interrupts on the local CPU (expands the asm macro above). */
extern __inline__ void
__cli(void)
{
	__asm__ __volatile__(
		"__cli"
		: /* no outputs */
		: /* no inputs */
		: "memory");	/* compiler barrier across the disable */
}
75
/*
 * Assembler macro "__save_flags": copy the CP0 Status register ($12)
 * into the register named by \flags.
 */
__asm__ (
	".macro\t__save_flags flags\n\t"
	".set\tpush\n\t"
	".set\treorder\n\t"
	"mfc0\t\\flags, $12\n\t"
	".set\tpop\n\t"
	".endm");

/* Read the current CP0 Status word into x (interrupt state in bit 0). */
#define __save_flags(x)							\
__asm__ __volatile__(							\
	"__save_flags %0"						\
	: "=r" (x))
88
/*
 * Assembler macro "__save_and_cli": copy the CP0 Status register into
 * \result, then clear its IE bit (bit 0) via the ori/xori pair in
 * scratch $1 and write it back, followed by three mtc0 hazard nops
 * (see the comment above __cli for why the nops are needed).
 */
__asm__ (
	".macro\t__save_and_cli result\n\t"
	".set\tpush\n\t"
	".set\treorder\n\t"
	".set\tnoat\n\t"
	"mfc0\t\\result, $12\n\t"
	"ori\t$1, \\result, 1\n\t"
	"xori\t$1, 1\n\t"
	".set\tnoreorder\n\t"
	"mtc0\t$1, $12\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	".set\tpop\n\t"
	".endm");

/* Save the Status word into x and disable local interrupts in one go. */
#define __save_and_cli(x)						\
__asm__ __volatile__(							\
	"__save_and_cli\t%0"						\
	: "=r" (x)							\
	: /* no inputs */						\
	: "memory")
111
/*
 * Save the current Status word into x, then enable local interrupts.
 * No trailing semicolon after while(0): the old trailing ';' made the
 * macro expand to two statements, which breaks constructs such as
 * "if (cond) __save_and_sti(x); else ...".
 */
#define __save_and_sti(x)	do { __save_flags(x); __sti(); } while(0)
113
/*
 * Assembler macro "__restore_flags": restore only the IE bit (bit 0)
 * from \flags into the CP0 Status register, preserving every other
 * current Status bit: $1 = current Status with IE forced clear
 * (ori 1 / xori 1), \flags reduced to its IE bit (andi 1), the two
 * OR-ed together and written back, followed by three mtc0 hazard nops.
 */
__asm__(".macro\t__restore_flags flags\n\t"
	".set\tnoreorder\n\t"
	".set\tnoat\n\t"
	"mfc0\t$1, $12\n\t"
	"andi\t\\flags, 1\n\t"
	"ori\t$1, 1\n\t"
	"xori\t$1, 1\n\t"
	"or\t\\flags, $1\n\t"
	"mtc0\t\\flags, $12\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	"sll\t$0, $0, 1\t\t\t# nop\n\t"
	".set\tat\n\t"
	".set\treorder\n\t"
	".endm");

/*
 * Restore a Status value saved by __save_flags()/__save_and_cli().
 * flags goes through a temporary because the asm macro clobbers its
 * operand (the "0" constraint ties the input to output %0).
 */
#define __restore_flags(flags)						\
do {									\
	unsigned long __tmp1;						\
									\
	__asm__ __volatile__(						\
		"__restore_flags\t%0"					\
		: "=r" (__tmp1)						\
		: "0" (flags)						\
		: "memory");						\
} while(0)
140
#ifdef CONFIG_SMP

/*
 * SMP: route cli()/sti() and friends through the out-of-line
 * __global_* helpers (implemented elsewhere in the arch code),
 * which — as the names suggest — coordinate interrupt state beyond
 * the local CPU.
 */
extern void __global_cli(void);
extern void __global_sti(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
#define cli() __global_cli()
#define sti() __global_sti()
#define save_flags(x) ((x)=__global_save_flags())
#define restore_flags(x) __global_restore_flags(x)
#define save_and_cli(x) do { save_flags(x); cli(); } while(0)
#define save_and_sti(x) do { save_flags(x); sti(); } while(0)

#else

/* UP: the classic macros only ever touch the local CPU. */
#define cli() __cli()
#define sti() __sti()
#define save_flags(x) __save_flags(x)
#define restore_flags(x) __restore_flags(x)
#define save_and_cli(x) __save_and_cli(x)
#define save_and_sti(x) __save_and_sti(x)

#endif /* CONFIG_SMP */
164
/* For spinlocks etc */
/*
 * The local_irq_* forms always use the local-CPU primitives (the
 * double-underscore versions), never the SMP __global_* routines,
 * regardless of CONFIG_SMP.
 */
#define local_irq_save(x)	__save_and_cli(x)
#define local_irq_set(x)	__save_and_sti(x)
#define local_irq_restore(x)	__restore_flags(x)
#define local_irq_disable()	__cli()
#define local_irq_enable()	__sti()
171
/*
 * __sync(): issue a MIPS "sync" barrier instruction; the "memory"
 * clobber additionally makes it a compiler barrier.
 */
#define __sync()				\
	__asm__ __volatile__(			\
		".set   push\n\t"		\
		".set   noreorder\n\t"		\
		"sync\n\t"			\
		".set   pop"			\
		: /* no output */		\
		: /* no input */		\
		: "memory")
181
/* All of wmb/rmb/mb map onto the full "sync" barrier here. */
#define fast_wmb()	__sync()
#define fast_rmb()	__sync()
#define fast_mb()	__sync()
/*
 * I/O barrier: "sync" followed by a load from KSEG1 (the uncached
 * segment) discarded into $0 — presumably to force the write buffer
 * to drain before the macro completes; confirm per platform.
 */
#define fast_iob()				\
	do {					\
		__sync();			\
		__asm__ __volatile__(		\
			".set   push\n\t"	\
			".set   noreorder\n\t"	\
			"lw     $0,%0\n\t"	\
			"nop\n\t"		\
			".set   pop"		\
			: /* no output */	\
			: "m" (*(int *)KSEG1)	\
			: "memory");		\
	} while (0)

#define wmb()		fast_wmb()
#define rmb()		fast_rmb()
#define mb()		fast_mb()
#define iob()		fast_iob()
203
#ifdef CONFIG_SMP
/* SMP: other CPUs can observe ordering, so use real hardware barriers. */
#define smp_mb()	mb()
#define smp_rmb()	rmb()
#define smp_wmb()	wmb()
#else
/* UP: a compiler barrier suffices; no other CPU can observe reordering. */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif
213
/* Store value into var, then issue a full / write barrier respectively. */
#define set_mb(var, value) \
do { var = value; mb(); } while (0)

#define set_wmb(var, value) \
do { var = value; wmb(); } while (0)
219
/*
 * switch_to(n) should switch tasks to task nr n, first
 * checking that n isn't the current task, in which case it does nothing.
 */
/* Low-level register switch, implemented in arch assembly elsewhere. */
extern asmlinkage void *resume(void *last, void *next);

/* Nothing to prepare before a context switch on this architecture. */
#define prepare_to_switch()	do { } while(0)

struct task_struct;

/* FPU context helpers, implemented elsewhere in the arch code. */
extern asmlinkage void lazy_fpu_switch(void *, void *);
extern asmlinkage void init_fpu(void);
extern asmlinkage void save_fp(struct task_struct *);
extern asmlinkage void restore_fp(struct task_struct *);
234
#ifdef CONFIG_SMP
/*
 * SMP: if the outgoing task used the FPU, save its FP context now,
 * drop CP0 coprocessor-1 access (ST0_CU1) and clear PF_USEDFPU.
 * Wrapped in do { ... } while(0) so the macro is a single statement
 * (safe inside if/else, and consistent with the !SMP variant below);
 * the old bare "if" expansion was a dangling-else hazard.
 */
#define SWITCH_DO_LAZY_FPU \
	do { \
		if (prev->flags & PF_USEDFPU) { \
			lazy_fpu_switch(prev, 0); \
			clear_cp0_status(ST0_CU1); \
			prev->flags &= ~PF_USEDFPU; \
		} \
	} while(0)
#else /* CONFIG_SMP */
#define SWITCH_DO_LAZY_FPU	do { } while(0)
#endif /* CONFIG_SMP */
245
/*
 * Context switch from prev to next.  Handles lazy FPU state first
 * (SMP only; see SWITCH_DO_LAZY_FPU), then calls resume() for the
 * register-level switch; its return value — presumably the task we
 * actually switched away from — is stored into (last).
 */
#define switch_to(prev,next,last) \
do { \
	SWITCH_DO_LAZY_FPU; \
	(last) = resume(prev, next); \
} while(0)
251
/*
 * Atomically exchange the 32-bit word at *m with val; returns the old
 * value.  Uses an ll/sc retry loop; the trailing "sync" orders the
 * exchange against later accesses.  "beqzl" is the branch-likely
 * form: its delay-slot "ll" reload executes only when the sc failed
 * and the loop retries.
 */
extern __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
		".set\tpush\t\t\t\t# xchg_u32\n\t"
		".set\tnoreorder\n\t"
		".set\tnomacro\n\t"
		"ll\t%0, %3\n"
		"1:\tmove\t%2, %z4\n\t"	/* %z4: "J" lets a constant 0 use $0 */
		"sc\t%2, %1\n\t"
		"beqzl\t%2, 1b\n\t"
		" ll\t%0, %3\n\t"
		"sync\n\t"
		".set\tpop"
		: "=&r" (val), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");

	return val;
}
273
/*
 * Atomically exchange the 64-bit word at *m with val via lld/scd;
 * same structure as xchg_u32 above, returns the old value.
 * NOTE(review): the parameter is declared "volatile int *" although
 * lld/scd access 64 bits, and the "=m"/"R" operands are built from
 * *m (a 32-bit lvalue).  Callers reach this through __xchg's
 * "volatile void *", so it works in practice, but the declared type
 * looks inconsistent — confirm before relying on it.
 */
extern __inline__ unsigned long xchg_u64(volatile int * m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
		".set\tpush\t\t\t\t# xchg_u64\n\t"
		".set\tnoreorder\n\t"
		".set\tnomacro\n\t"
		"lld\t%0, %3\n"
		"1:\tmove\t%2, %z4\n\t"
		"scd\t%2, %1\n\t"
		"beqzl\t%2, 1b\n\t"
		" lld\t%0, %3\n\t"
		"sync\n\t"
		".set\tpop"
		: "=&r" (val), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");

	return val;
}
295
/*
 * Type-generic atomic exchange: dispatches on sizeof(*(ptr)) and
 * casts the result back to the pointee type.  Only 32- and 64-bit
 * objects are handled (see __xchg below).
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
/* tas = test-and-set: atomically store 1, return the previous value. */
#define tas(ptr) (xchg((ptr),1))
298
299
/*
 * Dispatch a generic exchange to the width-specific ll/sc primitive.
 * Only 4- and 8-byte objects are supported; any other size performs
 * no memory operation and simply hands back the new value unchanged.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr,
				   int size)
{
	if (size == 4)
		return xchg_u32(ptr, x);
	if (size == 8)
		return xchg_u64(ptr, x);

	return x;
}
311
/* Install addr as exception vector n; presumably returns the old vector. */
extern void *set_except_vector(int n, void *addr);

/* Fatal-error reporting, implemented in the arch traps code elsewhere. */
extern void __die(const char *, struct pt_regs *, const char *file,
	const char *func, unsigned long line) __attribute__((noreturn));
extern void __die_if_kernel(const char *, struct pt_regs *, const char *file,
	const char *func, unsigned long line);

/* Convenience wrappers that capture file/function/line automatically. */
#define die(msg, regs)							\
	__die(msg, regs, __FILE__ ":", __FUNCTION__, __LINE__)
#define die_if_kernel(msg, regs)					\
	__die_if_kernel(msg, regs, __FILE__ ":", __FUNCTION__, __LINE__)
323
324 #endif /* _ASM_SYSTEM_H */