/*
 *  include/asm-s390x/system.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *
 *  Derived from "include/asm-i386/system.h"
 */

#ifndef __ASM_SYSTEM_H
#define __ASM_SYSTEM_H

#include <linux/config.h>
#include <asm/types.h>
#ifdef __KERNEL__
#include <asm/lowcore.h>
#endif
#include <linux/kernel.h>

#define prepare_to_switch()     do { } while(0)
#define switch_to(prev,next,last) do {                                       \
        if (prev == next)                                                    \
                break;                                                       \
        save_fp_regs(&prev->thread.fp_regs);                                 \
        restore_fp_regs(&next->thread.fp_regs);                              \
        last = resume(prev,next);                                            \
} while (0)
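
/*
 * Illustrative sketch (not part of this header): the 2.4 scheduler
 * core invokes switch_to() with the outgoing and incoming tasks; on
 * return, "last" refers to the task that was running on this CPU
 * before the switch.
 *
 *	struct task_struct *prev, *next, *last;
 *	...
 *	prepare_to_switch();
 *	switch_to(prev, next, last);
 */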

struct task_struct;

#define nop() __asm__ __volatile__ ("nop")

#define xchg(ptr,x) \
  ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(void *)(ptr),sizeof(*(ptr))))

extern void __misaligned_u16(void);
extern void __misaligned_u32(void);
extern void __misaligned_u64(void);

static inline unsigned long __xchg(unsigned long x, void * ptr, int size)
{
        switch (size) {
                case 1:
                        asm volatile (
                                "   lghi  1,3\n"
                                "   nr    1,%0\n"     /* isolate last 2 bits */
                                "   xr    %0,1\n"     /* align ptr */
                                "   bras  2,0f\n"
                                "   icm   1,8,7(%1)\n"   /* for ptr&3 == 0 */
                                "   stcm  0,8,7(%1)\n"
                                "   icm   1,4,7(%1)\n"   /* for ptr&3 == 1 */
                                "   stcm  0,4,7(%1)\n"
                                "   icm   1,2,7(%1)\n"   /* for ptr&3 == 2 */
                                "   stcm  0,2,7(%1)\n"
                                "   icm   1,1,7(%1)\n"   /* for ptr&3 == 3 */
                                "   stcm  0,1,7(%1)\n"
                                "0: sll   1,3\n"
                                "   la    2,0(1,2)\n" /* r2 points to an icm */
                                "   l     0,0(%0)\n"  /* get fullword */
                                "1: lr    1,0\n"      /* cs loop */
                                "   ex    0,0(2)\n"   /* insert x */
                                "   cs    0,1,0(%0)\n"
                                "   jl    1b\n"
                                "   ex    0,4(2)"     /* store *ptr to x */
                                : "+&a" (ptr) : "a" (&x)
                                : "memory", "cc", "0", "1", "2");
                        break;
                case 2:
                        if(((addr_t)ptr)&1)
                                __misaligned_u16();
                        asm volatile (
                                "   lghi  1,2\n"
                                "   nr    1,%0\n"     /* isolate bit 2^1 */
                                "   xr    %0,1\n"     /* align ptr */
                                "   bras  2,0f\n"
                                "   icm   1,12,6(%1)\n"   /* for ptr&2 == 0 */
                                "   stcm  0,12,6(%1)\n"
                                "   icm   1,3,6(%1)\n"    /* for ptr&2 == 2 */
                                "   stcm  0,3,6(%1)\n"
                                "0: sll   1,2\n"
                                "   la    2,0(1,2)\n" /* r2 points to an icm */
                                "   l     0,0(%0)\n"  /* get fullword */
                                "1: lr    1,0\n"      /* cs loop */
                                "   ex    0,0(2)\n"   /* insert x */
                                "   cs    0,1,0(%0)\n"
                                "   jl    1b\n"
                                "   ex    0,4(2)"     /* store *ptr to x */
                                : "+&a" (ptr) : "a" (&x)
                                : "memory", "cc", "0", "1", "2");
                        break;
                case 4:
                        if(((addr_t)ptr)&3)
                                __misaligned_u32();
                        asm volatile (
                                "    l    0,0(%1)\n"
                                "0:  cs   0,%0,0(%1)\n"
                                "    jl   0b\n"
                                "    lgfr %0,0\n"
                                : "+d" (x) : "a" (ptr)
                                : "memory", "cc", "0" );
                        break;
                case 8:
                        if(((addr_t)ptr)&7)
                                __misaligned_u64();
                        asm volatile (
                                "    lg  0,0(%1)\n"
                                "0:  csg 0,%0,0(%1)\n"
                                "    jl  0b\n"
                                "    lgr %0,0\n"
                                : "+d" (x) : "a" (ptr)
                                : "memory", "cc", "0" );
                        break;
        }
        return x;
}
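
/*
 * Usage sketch for xchg(), illustrative only ("lock_word" is a
 * hypothetical variable, not defined in this header). xchg() returns
 * the old value, so a simple test-and-set lock can be built on it:
 *
 *	static volatile unsigned int lock_word;
 *
 *	while (xchg(&lock_word, 1) != 0)
 *		;                       (spin until the old value was 0)
 *	(critical section)
 *	xchg(&lock_word, 0);
 */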

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 *
 * This is very similar to the ppc eieio/sync instruction in that it
 * does a checkpoint synchronisation and makes sure that
 * all memory ops have completed wrt other CPUs (see 7-15 POP DJB).
 */

#define eieio()  __asm__ __volatile__ ("BCR 15,0")
#define SYNC_OTHER_CORES(x)   eieio()
#define mb()    eieio()
#define rmb()   eieio()
#define wmb()   eieio()
#define smp_mb()       mb()
#define smp_rmb()      rmb()
#define smp_wmb()      wmb()
#define smp_mb__before_clear_bit()     smp_mb()
#define smp_mb__after_clear_bit()      smp_mb()

#define set_mb(var, value)      do { var = value; mb(); } while (0)
#define set_wmb(var, value)     do { var = value; wmb(); } while (0)

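/*
 * Illustrative pairing of wmb()/rmb() between a producer and a
 * consumer ("data" and "ready" are hypothetical variables):
 *
 *	producer                        consumer
 *	data = value;                   while (ready == 0)
 *	wmb();                                  ;
 *	ready = 1;                      rmb();
 *	                                use(data);
 *
 * On s390x all of mb(), rmb() and wmb() expand to "bcr 15,0", which
 * acts as a full serialization point.
 */
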
/* interrupt control.. */
#define __sti() ({ \
        unsigned long dummy; \
        __asm__ __volatile__ ( \
                "stosm 0(%0),0x03" : : "a" (&dummy) : "memory"); \
        })

#define __cli() ({ \
        unsigned long flags; \
        __asm__ __volatile__ ( \
                "stnsm 0(%0),0xFC" : : "a" (&flags) : "memory"); \
        flags; \
        })

#define __save_flags(x) \
        __asm__ __volatile__("stosm 0(%0),0" : : "a" (&x) : "memory")

#define __restore_flags(x) \
        __asm__ __volatile__("ssm   0(%0)" : : "a" (&x) : "memory")

#define __load_psw(psw) \
        __asm__ __volatile__("lpswe 0(%0)" : : "a" (&psw) : "cc" );

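/*
 * Typical pairing of the flag macros (sketch; "flags" is a
 * caller-provided unsigned long):
 *
 *	unsigned long flags;
 *
 *	__save_flags(flags);
 *	__cli();
 *	(code that must not be interrupted locally)
 *	__restore_flags(flags);
 *
 * __save_and_cli(flags) below expands to exactly this sequence.
 */
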
#define __ctl_load(array, low, high) ({ \
        __asm__ __volatile__ ( \
                "   la    1,%0\n" \
                "   bras  2,0f\n" \
                "   lctlg 0,0,0(1)\n" \
                "0: ex    %1,0(2)" \
                : : "m" (array), "a" (((low)<<4)+(high)) : "1", "2" ); \
        })

#define __ctl_store(array, low, high) ({ \
        __asm__ __volatile__ ( \
                "   la    1,%0\n" \
                "   bras  2,0f\n" \
                "   stctg 0,0,0(1)\n" \
                "0: ex    %1,0(2)" \
                : "=m" (array) : "a" (((low)<<4)+(high)) : "1", "2" ); \
        })

#define __ctl_set_bit(cr, bit) ({ \
        __u8 dummy[24]; \
        __asm__ __volatile__ ( \
                "    la    1,%0\n"       /* align to 8 byte */ \
                "    aghi  1,7\n" \
                "    nill  1,0xfff8\n" \
                "    bras  2,0f\n"       /* skip indirect insns */ \
                "    stctg 0,0,0(1)\n" \
                "    lctlg 0,0,0(1)\n" \
                "0:  ex    %1,0(2)\n"    /* execute stctg */ \
                "    lg    0,0(1)\n" \
                "    ogr   0,%2\n"       /* set the bit */ \
                "    stg   0,0(1)\n" \
                "1:  ex    %1,6(2)"      /* execute lctlg */ \
                : "=m" (dummy) : "a" (cr*17), "a" (1L<<(bit)) \
                : "cc", "0", "1", "2"); \
        })

#define __ctl_clear_bit(cr, bit) ({ \
        __u8 dummy[24]; \
        __asm__ __volatile__ ( \
                "    la    1,%0\n"       /* align to 8 byte */ \
                "    aghi  1,7\n" \
                "    nill  1,0xfff8\n" \
                "    bras  2,0f\n"       /* skip indirect insns */ \
                "    stctg 0,0,0(1)\n" \
                "    lctlg 0,0,0(1)\n" \
                "0:  ex    %1,0(2)\n"    /* execute stctg */ \
                "    lg    0,0(1)\n" \
                "    ngr   0,%2\n"       /* clear the bit */ \
                "    stg   0,0(1)\n" \
                "1:  ex    %1,6(2)"      /* execute lctlg */ \
                : "=m" (dummy) : "a" (cr*17), "a" (~(1L<<(bit))) \
                : "cc", "0", "1", "2"); \
        })

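/*
 * Sketch of control-register access (the register/bit numbers are
 * placeholders, not a recommendation):
 *
 *	__u64 cr0;
 *
 *	__ctl_store(cr0, 0, 0);         (read control register 0)
 *	__ctl_set_bit(0, 17);           (set a bit on this CPU only)
 *	__ctl_clear_bit(0, 17);         (clear it again)
 *
 * On SMP the ctl_set_bit()/ctl_clear_bit() wrappers defined further
 * down broadcast the change to all CPUs via smp_ctl_set_bit() and
 * smp_ctl_clear_bit().
 */
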
#define __save_and_cli(x)       do { __save_flags(x); __cli(); } while(0)
#define __save_and_sti(x)       do { __save_flags(x); __sti(); } while(0)

/* For spinlocks etc */
#define local_irq_save(x)       ((x) = __cli())
#define local_irq_set(x)        __save_and_sti(x)
#define local_irq_restore(x)    __restore_flags(x)
#define local_irq_disable()     __cli()
#define local_irq_enable()      __sti()

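/*
 * Minimal example of the spinlock-style helpers (illustrative):
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	(touch per-CPU state)
 *	local_irq_restore(flags);
 */
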
#ifdef CONFIG_SMP

extern void __global_cli(void);
extern void __global_sti(void);

extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
#define cli() __global_cli()
#define sti() __global_sti()
#define save_flags(x) ((x)=__global_save_flags())
#define restore_flags(x) __global_restore_flags(x)
#define save_and_cli(x) do { save_flags(x); cli(); } while(0)
#define save_and_sti(x) do { save_flags(x); sti(); } while(0)

extern void smp_ctl_set_bit(int cr, int bit);
extern void smp_ctl_clear_bit(int cr, int bit);
#define ctl_set_bit(cr, bit) smp_ctl_set_bit(cr, bit)
#define ctl_clear_bit(cr, bit) smp_ctl_clear_bit(cr, bit)

#else

#define cli() __cli()
#define sti() __sti()
#define save_flags(x) __save_flags(x)
#define restore_flags(x) __restore_flags(x)
#define save_and_cli(x) __save_and_cli(x)
#define save_and_sti(x) __save_and_sti(x)

#define ctl_set_bit(cr, bit) __ctl_set_bit(cr, bit)
#define ctl_clear_bit(cr, bit) __ctl_clear_bit(cr, bit)

#endif /* CONFIG_SMP */

#ifdef __KERNEL__
extern struct task_struct *resume(void *, void *);

extern int save_fp_regs1(s390_fp_regs *fpregs);
extern void save_fp_regs(s390_fp_regs *fpregs);
extern int restore_fp_regs1(s390_fp_regs *fpregs);
extern void restore_fp_regs(s390_fp_regs *fpregs);

extern void (*_machine_restart)(char *command);
extern void (*_machine_halt)(void);
extern void (*_machine_power_off)(void);

#endif /* __KERNEL__ */

#endif /* __ASM_SYSTEM_H */