/*
 *  linux/arch/arm/kernel/entry-armv.S
 *
 *  Copyright (C) 1996,1997,1998 Russell King.
 *  ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  Low-level vector interface routines
 *
 *  Note:  there is a StrongARM bug in the STMIA rn, {regs}^ instruction that causes
 *  it to save wrong values...  Be aware!
 */
#include <linux/config.h>
#include "entry-header.S"

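/*
 * Each platform block below provides three assembler macros used by the
 * common entry code further down in this file:
 *
 *  disable_fiq         - mask FIQs at the interrupt controller (may be empty)
 *  get_irqnr_and_base  - return the pending IRQ number in \irqnr, leaving the
 *                        condition code NE; EQ means no interrupt is pending
 *  irq_prio_table      - emit any priority-decode tables the above needs
 */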
#ifdef IOC_BASE
/* IOC / IOMD based hardware */
#include <asm/hardware/iomd.h>

                .equ    ioc_base_high, IOC_BASE & 0xff000000
                .equ    ioc_base_low, IOC_BASE & 0x00ff0000
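                @ IOC_BASE is split into two pieces so the address can be
                @ built with MOV/ORR immediates (an ARM immediate is an 8-bit
                @ value rotated right by an even number of bits)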
                .macro  disable_fiq
                mov     r12, #ioc_base_high
                .if     ioc_base_low
                orr     r12, r12, #ioc_base_low
                .endif
                strb    r12, [r12, #0x38]       @ Disable FIQ register
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     r4, #ioc_base_high              @ point at IOC
                .if     ioc_base_low
                orr     r4, r4, #ioc_base_low
                .endif
                ldrb    \irqstat, [r4, #IOMD_IRQREQB]   @ get high priority first
                ldr     \base, =irq_prio_h
                teq     \irqstat, #0
#ifdef IOMD_BASE
                ldreqb  \irqstat, [r4, #IOMD_DMAREQ]    @ get dma
                addeq   \base, \base, #256              @ irq_prio_h table size
                teqeq   \irqstat, #0
                bne     2406f
#endif
                ldreqb  \irqstat, [r4, #IOMD_IRQREQA]   @ get low priority
                addeq   \base, \base, #256              @ irq_prio_d table size
                teqeq   \irqstat, #0
#ifdef IOMD_IRQREQC
                ldreqb  \irqstat, [r4, #IOMD_IRQREQC]
                addeq   \base, \base, #256              @ irq_prio_l table size
                teqeq   \irqstat, #0
#endif
#ifdef IOMD_IRQREQD
                ldreqb  \irqstat, [r4, #IOMD_IRQREQD]
                addeq   \base, \base, #256              @ irq_prio_lc table size
                teqeq   \irqstat, #0
#endif
2406:           ldrneb  \irqnr, [\base, \irqstat]       @ get IRQ number
                .endm

/*
 * Interrupt table (incorporates priority).  Please note that we
 * rely on the order of these tables (see above code).
 */
                .macro  irq_prio_table
irq_prio_h:     .byte    0, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   12, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
                .byte   13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
#ifdef IOMD_BASE
irq_prio_d:     .byte    0,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   20,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
                .byte   21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
#endif
irq_prio_l:     .byte    0, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
                .byte    4, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
                .byte    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
                .byte    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
                .byte    6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
                .byte    6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
                .byte    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
                .byte    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
                .byte    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
#ifdef IOMD_IRQREQC
irq_prio_lc:    .byte   24,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
                .byte   28,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
                .byte   29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
                .byte   29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
                .byte   30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
                .byte   30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
                .byte   29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
                .byte   29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
                .byte   31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
#endif
#ifdef IOMD_IRQREQD
irq_prio_ld:    .byte   40,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
                .byte   44,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
                .byte   45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
                .byte   45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
                .byte   46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
                .byte   46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
                .byte   45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
                .byte   45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
                .byte   47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
#endif
                .endm

#elif defined(CONFIG_ARCH_EBSA110)

#define IRQ_STAT                0xff000000      /* read */

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, stat, base, tmp
                mov     \base, #IRQ_STAT
                ldrb    \stat, [\base]                  @ get interrupts
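                @ binary-search \stat for its lowest set bit: \irqnr ends up
                @ holding that bit number, and NE on exit means an interrupt
                @ was pending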
                mov     \irqnr, #0
                tst     \stat, #15
                addeq   \irqnr, \irqnr, #4
                moveq   \stat, \stat, lsr #4
                tst     \stat, #3
                addeq   \irqnr, \irqnr, #2
                moveq   \stat, \stat, lsr #2
                tst     \stat, #1
                addeq   \irqnr, \irqnr, #1
                moveq   \stat, \stat, lsr #1
                tst     \stat, #1                       @ bit 0 should be set
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_SHARK)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     r4, #0xe0000000

                mov     \irqstat, #0x0C
                strb    \irqstat, [r4, #0x20]           @ outb(0x0C, 0x20) /* Poll command */
                ldrb    \irqnr, [r4, #0x20]             @ irq = inb(0x20) & 7
                and     \irqstat, \irqnr, #0x80
                teq     \irqstat, #0
                beq     43f
                and     \irqnr, \irqnr, #7
                teq     \irqnr, #2
                bne     44f
43:             mov     \irqstat, #0x0C
                strb    \irqstat, [r4, #0xa0]           @ outb(0x0C, 0xA0) /* Poll command */
                ldrb    \irqnr, [r4, #0xa0]             @ irq = (inb(0xA0) & 7) + 8
                and     \irqstat, \irqnr, #0x80
                teq     \irqstat, #0
                beq     44f
                and     \irqnr, \irqnr, #7
                add     \irqnr, \irqnr, #8
44:             teq     \irqstat, #0
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_FOOTBRIDGE)
#include <asm/hardware/dec21285.h>

                .macro  disable_fiq
                .endm

                .equ    dc21285_high, ARMCSR_BASE & 0xff000000
                .equ    dc21285_low, ARMCSR_BASE & 0x00ffffff

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     r4, #dc21285_high
                .if     dc21285_low
                orr     r4, r4, #dc21285_low
                .endif
                ldr     \irqstat, [r4, #0x180]          @ get interrupts

                mov     \irqnr, #IRQ_SDRAMPARITY
                tst     \irqstat, #IRQ_MASK_SDRAMPARITY
                bne     1001f

                tst     \irqstat, #IRQ_MASK_UART_RX
                movne   \irqnr, #IRQ_CONRX
                bne     1001f

                tst     \irqstat, #IRQ_MASK_DMA1
                movne   \irqnr, #IRQ_DMA1
                bne     1001f

                tst     \irqstat, #IRQ_MASK_DMA2
                movne   \irqnr, #IRQ_DMA2
                bne     1001f

                tst     \irqstat, #IRQ_MASK_IN0
                movne   \irqnr, #IRQ_IN0
                bne     1001f

                tst     \irqstat, #IRQ_MASK_IN1
                movne   \irqnr, #IRQ_IN1
                bne     1001f

                tst     \irqstat, #IRQ_MASK_IN2
                movne   \irqnr, #IRQ_IN2
                bne     1001f

                tst     \irqstat, #IRQ_MASK_IN3
                movne   \irqnr, #IRQ_IN3
                bne     1001f

                tst     \irqstat, #IRQ_MASK_PCI
                movne   \irqnr, #IRQ_PCI
                bne     1001f

                tst     \irqstat, #IRQ_MASK_DOORBELLHOST
                movne   \irqnr, #IRQ_DOORBELLHOST
                bne     1001f

                tst     \irqstat, #IRQ_MASK_I2OINPOST
                movne   \irqnr, #IRQ_I2OINPOST
                bne     1001f

                tst     \irqstat, #IRQ_MASK_TIMER1
                movne   \irqnr, #IRQ_TIMER1
                bne     1001f

                tst     \irqstat, #IRQ_MASK_TIMER2
                movne   \irqnr, #IRQ_TIMER2
                bne     1001f

                tst     \irqstat, #IRQ_MASK_TIMER3
                movne   \irqnr, #IRQ_TIMER3
                bne     1001f

                tst     \irqstat, #IRQ_MASK_UART_TX
                movne   \irqnr, #IRQ_CONTX
                bne     1001f

                tst     \irqstat, #IRQ_MASK_PCI_ABORT
                movne   \irqnr, #IRQ_PCI_ABORT
                bne     1001f

                tst     \irqstat, #IRQ_MASK_PCI_SERR
                movne   \irqnr, #IRQ_PCI_SERR
                bne     1001f

                tst     \irqstat, #IRQ_MASK_DISCARD_TIMER
                movne   \irqnr, #IRQ_DISCARD_TIMER
                bne     1001f

                tst     \irqstat, #IRQ_MASK_PCI_DPERR
                movne   \irqnr, #IRQ_PCI_DPERR
                bne     1001f

                tst     \irqstat, #IRQ_MASK_PCI_PERR
                movne   \irqnr, #IRQ_PCI_PERR
1001:
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_NEXUSPCI)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                ldr     \irqstat, =INTCONT_BASE
                ldr     \base, =soft_irq_mask
                ldr     \irqstat, [\irqstat]            @ get interrupts
                ldr     \base, [\base]
                mov     \irqnr, #0
                and     \irqstat, \irqstat, \base       @ mask out disabled ones
1001:           tst     \irqstat, #1
                addeq   \irqnr, \irqnr, #1
                moveq   \irqstat, \irqstat, lsr #1
                tsteq   \irqnr, #32
                beq     1001b
                teq     \irqnr, #32
                .endm

                .macro  irq_prio_table
                .ltorg
                .bss
ENTRY(soft_irq_mask)
                .word   0
                .text
                .endm

#elif defined(CONFIG_ARCH_TBOX)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                ldr     \irqstat, =0xffff7000
                ldr     \irqstat, [\irqstat]            @ get interrupts
                ldr     \base, =soft_irq_mask
                ldr     \base, [\base]
                mov     \irqnr, #0
                and     \irqstat, \irqstat, \base       @ mask out disabled ones
1001:           tst     \irqstat, #1
                addeq   \irqnr, \irqnr, #1
                moveq   \irqstat, \irqstat, lsr #1
                tsteq   \irqnr, #32
                beq     1001b
                teq     \irqnr, #32
                .endm

                .macro  irq_prio_table
                .ltorg
                .bss
ENTRY(soft_irq_mask)
                .word   0
                .text
                .endm

#elif defined(CONFIG_ARCH_SA1100)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     r4, #0xfa000000                 @ ICIP = 0xfa050000
                add     r4, r4, #0x00050000
                ldr     \irqstat, [r4]                  @ get irqs
                ldr     \irqnr, [r4, #4]                @ ICMR = 0xfa050004
                ands    \irqstat, \irqstat, \irqnr
                mov     \irqnr, #0
                beq     1001f
                tst     \irqstat, #0xff
                moveq   \irqstat, \irqstat, lsr #8
                addeq   \irqnr, \irqnr, #8
                tsteq   \irqstat, #0xff
                moveq   \irqstat, \irqstat, lsr #8
                addeq   \irqnr, \irqnr, #8
                tsteq   \irqstat, #0xff
                moveq   \irqstat, \irqstat, lsr #8
                addeq   \irqnr, \irqnr, #8
                tst     \irqstat, #0x0f
                moveq   \irqstat, \irqstat, lsr #4
                addeq   \irqnr, \irqnr, #4
                tst     \irqstat, #0x03
                moveq   \irqstat, \irqstat, lsr #2
                addeq   \irqnr, \irqnr, #2
                tst     \irqstat, #0x01
                addeqs  \irqnr, \irqnr, #1
1001:
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_L7200)
#include <asm/hardware.h>

                .equ    irq_base_addr,  IO_BASE_2

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     \irqstat, #irq_base_addr                @ Virt addr IRQ regs
                add     \irqstat, \irqstat, #0x00001000         @ Status reg
                ldr     \irqstat, [\irqstat, #0]                @ get interrupts
                mov     \irqnr, #0
1001:           tst     \irqstat, #1
                addeq   \irqnr, \irqnr, #1
                moveq   \irqstat, \irqstat, lsr #1
                tsteq   \irqnr, #32
                beq     1001b
                teq     \irqnr, #32
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_INTEGRATOR)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
/* FIXME: should not be using so many LDRs here */
                ldr     \irqnr, =IO_ADDRESS(INTEGRATOR_IC_BASE)
                ldr     \irqstat, [\irqnr, #IRQ_STATUS]         @ get masked status
                ldr     \irqnr, =IO_ADDRESS(INTEGRATOR_HDR_BASE)
                ldr     \irqnr, [\irqnr, #(INTEGRATOR_HDR_IC_OFFSET+IRQ_STATUS)]
                orr     \irqstat, \irqstat, \irqnr, lsl #INTEGRATOR_CM_INT0

                mov     \irqnr, #0
1001:           tst     \irqstat, #1
                bne     1002f
                add     \irqnr, \irqnr, #1
                mov     \irqstat, \irqstat, lsr #1
                cmp     \irqnr, #22
                bcc     1001b
1002:           /* EQ will be set if we reach 22 */
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_AT91RM9200)
#include <asm/hardware.h>

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                ldr     \base, =(AT91C_VA_BASE_SYS)     @ base virtual address of SYS peripherals
                ldr     \irqnr, [\base, #AIC_IVR]       @ read IRQ vector register: de-asserts nIRQ to processor (and clears interrupt)
                ldr     \irqstat, [\base, #AIC_ISR]     @ read interrupt source number
                teq     \irqstat, #0                    @ ISR is 0 when no current interrupt, or spurious interrupt
                streq   \tmp, [\base, #AIC_EOICR]       @ not going to be handled further, then ACK it now.
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_MX1ADS)

                .macro  disable_fiq
                .endm
#define AITC_NIVECSR   0x40
                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                ldr     \irqstat, =IO_ADDRESS(MX1ADS_AITC_BASE)
                @ Load offset & priority of the highest priority
                @ interrupt pending.
                ldr     \irqnr, [\irqstat, #AITC_NIVECSR]
                @ Shift off the priority leaving the offset or
                @ "interrupt number"
                mov     \irqnr, \irqnr, lsr #16
                ldr     \irqstat, =1                    @ dummy compare
                ldr     \base, =0xFFFF                  @ invalid interrupt
                cmp     \irqnr, \base
                bne     1001f
                ldr     \irqstat, =0
1001:
                tst     \irqstat, #1                    @ to make the condition code = TRUE
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_OMAHA)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp

                /* Read all interrupts pending... */
                ldr     \irqnr, =IO_ADDRESS(PLAT_PERIPHERAL_BASE) + OMAHA_INTPND
                ldr     \irqstat, [\irqnr]              /* INTPND */

                /* All pending irqs are now in \irqstat */
                mov     \irqnr, #0
1001:           tst     \irqstat, #1
                bne     1002f
                add     \irqnr, \irqnr, #1
                mov     \irqstat, \irqstat, lsr #1
                cmp     \irqnr, #MAXIRQNUM
                bcc     1001b
1002:           /* EQ will be set if we reach MAXIRQNUM */

                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_CLPS711X)

#include <asm/hardware/clps7111.h>

                .macro  disable_fiq
                .endm

#if (INTSR2 - INTSR1) != (INTMR2 - INTMR1)
#error INTSR stride != INTMR stride
#endif

                .macro  get_irqnr_and_base, irqnr, stat, base, mask
                mov     \base, #CLPS7111_BASE
                ldr     \stat, [\base, #INTSR1]
                ldr     \mask, [\base, #INTMR1]
                mov     \irqnr, #4
                mov     \mask, \mask, lsl #16
                and     \stat, \stat, \mask, lsr #16
                movs    \stat, \stat, lsr #4
                bne     1001f

                add     \base, \base, #INTSR2 - INTSR1
                ldr     \stat, [\base, #INTSR1]
                ldr     \mask, [\base, #INTMR1]
                mov     \irqnr, #16
                mov     \mask, \mask, lsl #16
                and     \stat, \stat, \mask, lsr #16

1001:           tst     \stat, #255
                addeq   \irqnr, \irqnr, #8
                moveq   \stat, \stat, lsr #8
                tst     \stat, #15
                addeq   \irqnr, \irqnr, #4
                moveq   \stat, \stat, lsr #4
                tst     \stat, #3
                addeq   \irqnr, \irqnr, #2
                moveq   \stat, \stat, lsr #2
                tst     \stat, #1
                addeq   \irqnr, \irqnr, #1
                moveq   \stat, \stat, lsr #1
                tst     \stat, #1                       @ bit 0 should be set
                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_CAMELOT)
#include <asm/arch/platform.h>
#undef IRQ_MODE /* same name defined in asm/proc/ptrace.h */
#include <asm/arch/int_ctrl00.h>

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp

                ldr     \irqstat, =INT_ID(IO_ADDRESS(EXC_INT_CTRL00_BASE))
                ldr     \irqnr, [\irqstat]
                cmp     \irqnr, #0
                subne   \irqnr, \irqnr, #1

                .endm

                .macro  irq_prio_table
                .endm

#elif defined(CONFIG_ARCH_ANAKIN)

                .macro  disable_fiq
                .endm

                .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
                mov     \base, #IO_BASE
                mov     \irqstat, #INTERRUPT_CONTROLLER
                ldr     \tmp, =anakin_irq_mask
                ldr     \irqstat, [\base, \irqstat]
                ldr     \tmp, [\tmp]
                ands    \irqstat, \irqstat, \tmp
                ldrne   \tmp, =anakin_active_irqs
                strne   \irqstat, [\tmp]
                movne   \irqnr, #IRQ_ANAKIN
                .endm

                .macro  irq_prio_table
                .ltorg
                .bss
ENTRY(anakin_irq_mask)
                .word   0
ENTRY(anakin_active_irqs)
                .space  4
                .text
                .endm

#else
#error Unknown architecture
#endif

/*
 * Invalid mode handlers
 */
__pabt_invalid: sub     sp, sp, #S_FRAME_SIZE           @ Allocate frame size in one go
                stmia   sp, {r0 - lr}                   @ Save XXX r0 - lr
                ldr     r4, .LCabt
                mov     r1, #BAD_PREFETCH
                b       1f

__dabt_invalid: sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - lr}                   @ Save SVC r0 - lr [lr *should* be intact]
                ldr     r4, .LCabt
                mov     r1, #BAD_DATA
                b       1f

__irq_invalid:  sub     sp, sp, #S_FRAME_SIZE           @ Allocate space on stack for frame
                stmfd   sp, {r0 - lr}                   @ Save r0 - lr
                ldr     r4, .LCirq
                mov     r1, #BAD_IRQ
                b       1f

__und_invalid:  sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - lr}
                ldr     r4, .LCund
                mov     r1, #BAD_UNDEFINSTR             @ int reason

1:              zero_fp
                ldmia   r4, {r5 - r7}                   @ Get XXX pc, cpsr, old_r0
                add     r4, sp, #S_PC
                stmia   r4, {r5 - r7}                   @ Save XXX pc, cpsr, old_r0
                mov     r0, sp
                and     r2, r6, #31                     @ int mode
                b       SYMBOL_NAME(bad_mode)

/* We get here if an undefined instruction happens and the floating
 * point emulator is not present.  If the offending instruction was
 * a WFS, we just perform a normal return as if we had emulated the
 * operation.  This is a hack to allow some basic userland binaries
 * to run so that the emulator module proper can be loaded. --philb
 */
fpe_not_present:
                mov     pc, lr

/*
 * SVC mode handlers
 */
                .align  5
__dabt_svc:     sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r2, .LCabt
                add     r0, sp, #S_FRAME_SIZE
                ldmia   r2, {r2 - r4}                   @ get pc, cpsr
                add     r5, sp, #S_SP
                mov     r1, lr
                stmia   r5, {r0 - r4}                   @ save sp_SVC, lr_SVC, pc, cpsr, old_r0
                mrs     r9, cpsr                        @ Enable interrupts if they were
                tst     r3, #I_BIT
                biceq   r9, r9, #I_BIT                  @ previously
                mov     r0, r2                          @ *** remove once everyone's in sync
/*
 * This routine must not corrupt r9
 */
#ifdef MULTI_CPU
                ldr     r4, .LCprocfns                  @ pass r0, r3 to
                mov     lr, pc                          @ processor code
                ldr     pc, [r4]                        @ call processor specific code
#else
                bl      cpu_data_abort
#endif
                msr     cpsr_c, r9
                mov     r2, sp
                bl      SYMBOL_NAME(do_DataAbort)
                mov     r0, #I_BIT | MODE_SVC
                msr     cpsr_c, r0
                ldr     r0, [sp, #S_PSR]
                msr     spsr, r0
                ldmia   sp, {r0 - pc}^                  @ load r0 - pc, cpsr

                .align  5
__irq_svc:      sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r7, .LCirq
                add     r5, sp, #S_FRAME_SIZE
                ldmia   r7, {r7 - r9}
                add     r4, sp, #S_SP
                mov     r6, lr
                stmia   r4, {r5, r6, r7, r8, r9}        @ save sp_SVC, lr_SVC, pc, cpsr, old_r0
1:              get_irqnr_and_base r0, r6, r5, lr
                movne   r1, sp
                @
                @ routine called with r0 = irq number, r1 = struct pt_regs *
                @
                adrsvc  ne, lr, 1b
                bne     asm_do_IRQ
                ldr     r0, [sp, #S_PSR]                @ irqs are already disabled
                msr     spsr, r0
                ldmia   sp, {r0 - pc}^                  @ load r0 - pc, cpsr

                .ltorg

                .align  5
__und_svc:      sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r7, .LCund
                mov     r6, lr
                ldmia   r7, {r7 - r9}
                add     r5, sp, #S_FRAME_SIZE
                add     r4, sp, #S_SP
                stmia   r4, {r5 - r9}                   @ save sp_SVC, lr_SVC, pc, cpsr, old_r0

                adrsvc  al, r9, 1f                      @ r9  = normal FP return
                bl      call_fpe                        @ lr  = undefined instr return

                mov     r0, r5                          @ unsigned long pc
                mov     r1, sp                          @ struct pt_regs *regs
                bl      SYMBOL_NAME(do_undefinstr)

1:              mov     r0, #I_BIT | MODE_SVC
                msr     cpsr_c, r0
                ldr     lr, [sp, #S_PSR]                @ Get SVC cpsr
                msr     spsr, lr
                ldmia   sp, {r0 - pc}^                  @ Restore SVC registers

                .align  5
__pabt_svc:     sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r2, .LCabt
                add     r0, sp, #S_FRAME_SIZE
                ldmia   r2, {r2 - r4}                   @ get pc, cpsr
                add     r5, sp, #S_SP
                mov     r1, lr
                stmia   r5, {r0 - r4}                   @ save sp_SVC, lr_SVC, pc, cpsr, old_r0
                mrs     r9, cpsr                        @ Enable interrupts if they were
                tst     r3, #I_BIT
                biceq   r9, r9, #I_BIT                  @ previously
                msr     cpsr_c, r9
                mov     r0, r2                          @ address (pc)
                mov     r1, sp                          @ regs
                bl      SYMBOL_NAME(do_PrefetchAbort)   @ call abort handler
                mov     r0, #I_BIT | MODE_SVC
                msr     cpsr_c, r0
                ldr     r0, [sp, #S_PSR]
                msr     spsr, r0
                ldmia   sp, {r0 - pc}^                  @ load r0 - pc, cpsr

                .align  5
.LCirq:         .word   __temp_irq
.LCund:         .word   __temp_und
.LCabt:         .word   __temp_abt
#ifdef MULTI_CPU
.LCprocfns:     .word   SYMBOL_NAME(processor)
#endif
.LCfp:          .word   SYMBOL_NAME(fp_enter)

                irq_prio_table

/*
 * User mode handlers
 */
                .align  5
__dabt_usr:     sub     sp, sp, #S_FRAME_SIZE           @ Allocate frame size in one go
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r7, .LCabt
                add     r5, sp, #S_PC
                ldmia   r7, {r2 - r4}                   @ Get USR pc, cpsr
                stmia   r5, {r2 - r4}                   @ Save USR pc, cpsr, old_r0
                stmdb   r5, {sp, lr}^
                alignment_trap r7, r7, __temp_abt
                zero_fp
                mov     r0, r2                          @ remove once everyone's in sync
#ifdef MULTI_CPU
                ldr     r4, .LCprocfns                  @ pass r0, r3 to
                mov     lr, pc                          @ processor code
                ldr     pc, [r4]                        @ call processor specific code
#else
                bl      cpu_data_abort
#endif
                mov     r2, #MODE_SVC
                msr     cpsr_c, r2                      @ Enable interrupts
                mov     r2, sp
                adrsvc  al, lr, ret_from_exception
                b       SYMBOL_NAME(do_DataAbort)

                .align  5
__irq_usr:      sub     sp, sp, #S_FRAME_SIZE
                stmia   sp, {r0 - r12}                  @ save r0 - r12
                ldr     r4, .LCirq
                add     r8, sp, #S_PC
                ldmia   r4, {r5 - r7}                   @ get saved PC, SPSR
                stmia   r8, {r5 - r7}                   @ save pc, psr, old_r0
                stmdb   r8, {sp, lr}^
                alignment_trap r4, r7, __temp_irq
                zero_fp
1:              get_irqnr_and_base r0, r6, r5, lr
                movne   r1, sp
                adrsvc  ne, lr, 1b
                @
                @ routine called with r0 = irq number, r1 = struct pt_regs *
                @
                bne     asm_do_IRQ
                mov     why, #0
                get_current_task tsk
                b       ret_to_user

                .ltorg

                .align  5
__und_usr:      sub     sp, sp, #S_FRAME_SIZE           @ Allocate frame size in one go
                stmia   sp, {r0 - r12}                  @ Save r0 - r12
                ldr     r4, .LCund
                add     r8, sp, #S_PC
                ldmia   r4, {r5 - r7}
                stmia   r8, {r5 - r7}                   @ Save USR pc, cpsr, old_r0
                stmdb   r8, {sp, lr}^                   @ Save user sp, lr
                alignment_trap r4, r7, __temp_und
                zero_fp
                tst     r6, #T_BIT                      @ Thumb mode
                bne     fpundefinstr
                adrsvc  al, r9, ret_from_exception      @ r9  = normal FP return
                adrsvc  al, lr, fpundefinstr            @ lr  = undefined instr return

call_fpe:       get_current_task r10
                mov     r8, #1
                strb    r8, [r10, #TSK_USED_MATH]       @ set current->used_math
                ldr     r4, .LCfp
                add     r10, r10, #TSS_FPESAVE          @ r10 = workspace
                ldr     pc, [r4]                        @ Call FP module USR entry point

fpundefinstr:   mov     r0, #MODE_SVC
                msr     cpsr_c, r0                      @ Enable interrupts
                mov     r0, lr
                mov     r1, sp
                adrsvc  al, lr, ret_from_exception
                b       SYMBOL_NAME(do_undefinstr)

                .align  5
__pabt_usr:     sub     sp, sp, #S_FRAME_SIZE           @ Allocate frame size in one go
                stmia   sp, {r0 - r12}                  @ Save r0 - r12
                ldr     r4, .LCabt
                add     r8, sp, #S_PC
                ldmia   r4, {r5 - r7}                   @ Get USR pc, cpsr
                stmia   r8, {r5 - r7}                   @ Save USR pc, cpsr, old_r0
                stmdb   r8, {sp, lr}^                   @ Save sp_usr lr_usr
                alignment_trap r4, r7, __temp_abt
                zero_fp
                mov     r0, #MODE_SVC
                msr     cpsr_c, r0                      @ Enable interrupts
                mov     r0, r5                          @ address (pc)
                mov     r1, sp                          @ regs
                bl      SYMBOL_NAME(do_PrefetchAbort)   @ call abort handler
                /* fall through */
/*
 * This is the return code to user mode for abort handlers
 */
ENTRY(ret_from_exception)
                disable_irq r1
                mov     why, #0
                get_current_task tsk
                b       ret_to_user

                .data
ENTRY(fp_enter)
                .word   fpe_not_present
                .text
/*
 * Register switch for ARMv3 and ARMv4 processors
 * r0 = previous, r1 = next, return previous.
 * previous and next are guaranteed not to be the same.
 */
ENTRY(__switch_to)
                stmfd   sp!, {r4 - sl, fp, lr}          @ Store most regs on stack
                mrs     ip, cpsr
                str     ip, [sp, #-4]!                  @ Save cpsr_SVC
                str     sp, [r0, #TSS_SAVE]             @ Save sp_SVC
                ldr     sp, [r1, #TSS_SAVE]             @ Get saved sp_SVC
                ldr     r2, [r1, #TSS_DOMAIN]
                ldr     ip, [sp], #4
                mcr     p15, 0, r2, c3, c0              @ Set domain register
                msr     spsr, ip                        @ Save task's CPSR into SPSR for this return
                ldmfd   sp!, {r4 - sl, fp, pc}^         @ Load all regs saved previously

                .section ".text.init",#alloc,#execinstr
/*
 * Vector stubs.  NOTE that we only align 'vector_IRQ' to a cache line boundary,
 * and we rely on each stub being exactly 48 bytes (1.5 cache lines) in size.
 * This means that we only ever load two cache lines for this code, or one if
 * we're lucky.  We also copy this code to 0x200 so that we can use branches in
 * the vectors, rather than ldr's.
 */
                .align  5
__stubs_start:
/*
 * Interrupt dispatcher
 * Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
 */
vector_IRQ:     @
                @ save mode specific registers
                @
                ldr     r13, .LCsirq
                sub     lr, lr, #4
                str     lr, [r13]                       @ save lr_IRQ
                mrs     lr, spsr
                str     lr, [r13, #4]                   @ save spsr_IRQ
                @
                @ now branch to the relevant MODE handling routine
                @
                mrs     r13, cpsr
                bic     r13, r13, #MODE_MASK
                orr     r13, r13, #I_BIT | MODE_SVC
                msr     spsr_c, r13                     @ switch to SVC_32 mode

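                @ the low 4 bits of the saved psr give the mode we came from;
                @ they index the branch table that follows (pc reads two
                @ instructions ahead, which is exactly .LCtab_irq)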
                and     lr, lr, #15
                ldr     lr, [pc, lr, lsl #2]
                movs    pc, lr                          @ Changes mode and branches

.LCtab_irq:     .word   __irq_usr                       @  0  (USR_26 / USR_32)
                .word   __irq_invalid                   @  1  (FIQ_26 / FIQ_32)
                .word   __irq_invalid                   @  2  (IRQ_26 / IRQ_32)
                .word   __irq_svc                       @  3  (SVC_26 / SVC_32)
                .word   __irq_invalid                   @  4
                .word   __irq_invalid                   @  5
                .word   __irq_invalid                   @  6
                .word   __irq_invalid                   @  7
                .word   __irq_invalid                   @  8
                .word   __irq_invalid                   @  9
                .word   __irq_invalid                   @  a
                .word   __irq_invalid                   @  b
                .word   __irq_invalid                   @  c
                .word   __irq_invalid                   @  d
                .word   __irq_invalid                   @  e
                .word   __irq_invalid                   @  f

                .align  5

/*
 * Data abort dispatcher - dispatches it to the correct handler for the processor mode
 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
 */
vector_data:    @
                @ save mode specific registers
                @
                ldr     r13, .LCsabt
                sub     lr, lr, #8
                str     lr, [r13]
                mrs     lr, spsr
                str     lr, [r13, #4]
                @
                @ now branch to the relevant MODE handling routine
                @
                mrs     r13, cpsr
                bic     r13, r13, #MODE_MASK
                orr     r13, r13, #I_BIT | MODE_SVC
                msr     spsr_c, r13                     @ switch to SVC_32 mode

                and     lr, lr, #15
                ldr     lr, [pc, lr, lsl #2]
                movs    pc, lr                          @ Changes mode and branches

.LCtab_dabt:    .word   __dabt_usr                      @  0  (USR_26 / USR_32)
                .word   __dabt_invalid                  @  1  (FIQ_26 / FIQ_32)
                .word   __dabt_invalid                  @  2  (IRQ_26 / IRQ_32)
                .word   __dabt_svc                      @  3  (SVC_26 / SVC_32)
                .word   __dabt_invalid                  @  4
                .word   __dabt_invalid                  @  5
                .word   __dabt_invalid                  @  6
                .word   __dabt_invalid                  @  7
                .word   __dabt_invalid                  @  8
                .word   __dabt_invalid                  @  9
                .word   __dabt_invalid                  @  a
                .word   __dabt_invalid                  @  b
                .word   __dabt_invalid                  @  c
                .word   __dabt_invalid                  @  d
                .word   __dabt_invalid                  @  e
                .word   __dabt_invalid                  @  f

                .align  5

/*
 * Prefetch abort dispatcher - dispatches it to the correct handler for the processor mode
 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
 */
vector_prefetch:
                @
                @ save mode specific registers
                @
                ldr     r13, .LCsabt
                sub     lr, lr, #4
                str     lr, [r13]                       @ save lr_ABT
                mrs     lr, spsr
                str     lr, [r13, #4]                   @ save spsr_ABT
                @
                @ now branch to the relevant MODE handling routine
                @
                mrs     r13, cpsr
                bic     r13, r13, #MODE_MASK
                orr     r13, r13, #I_BIT | MODE_SVC
                msr     spsr_c, r13                     @ switch to SVC_32 mode

                ands    lr, lr, #15
                ldr     lr, [pc, lr, lsl #2]
                movs    pc, lr

.LCtab_pabt:    .word   __pabt_usr                      @  0 (USR_26 / USR_32)
                .word   __pabt_invalid                  @  1 (FIQ_26 / FIQ_32)
                .word   __pabt_invalid                  @  2 (IRQ_26 / IRQ_32)
                .word   __pabt_svc                      @  3 (SVC_26 / SVC_32)
                .word   __pabt_invalid                  @  4
                .word   __pabt_invalid                  @  5
                .word   __pabt_invalid                  @  6
                .word   __pabt_invalid                  @  7
                .word   __pabt_invalid                  @  8
                .word   __pabt_invalid                  @  9
                .word   __pabt_invalid                  @  a
                .word   __pabt_invalid                  @  b
                .word   __pabt_invalid                  @  c
                .word   __pabt_invalid                  @  d
                .word   __pabt_invalid                  @  e
                .word   __pabt_invalid                  @  f

                .align  5

/*
 * Undef instr entry dispatcher - dispatches it to the correct handler for the processor mode
 * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
 */
vector_undefinstr:
                @
                @ save mode specific registers
                @
                ldr     r13, .LCsund
                str     lr, [r13]                       @ save lr_UND
                mrs     lr, spsr
                str     lr, [r13, #4]                   @ save spsr_UND
                @
                @ now branch to the relevant MODE handling routine
                @
                mrs     r13, cpsr
                bic     r13, r13, #MODE_MASK
                orr     r13, r13, #I_BIT | MODE_SVC
                msr     spsr_c, r13                     @ switch to SVC_32 mode

                and     lr, lr, #15
                ldr     lr, [pc, lr, lsl #2]
                movs    pc, lr                          @ Changes mode and branches

.LCtab_und:     .word   __und_usr                       @  0 (USR_26 / USR_32)
                .word   __und_invalid                   @  1 (FIQ_26 / FIQ_32)
                .word   __und_invalid                   @  2 (IRQ_26 / IRQ_32)
                .word   __und_svc                       @  3 (SVC_26 / SVC_32)
                .word   __und_invalid                   @  4
                .word   __und_invalid                   @  5
                .word   __und_invalid                   @  6
                .word   __und_invalid                   @  7
                .word   __und_invalid                   @  8
                .word   __und_invalid                   @  9
                .word   __und_invalid                   @  a
                .word   __und_invalid                   @  b
                .word   __und_invalid                   @  c
                .word   __und_invalid                   @  d
                .word   __und_invalid                   @  e
                .word   __und_invalid                   @  f

                .align  5

/*=============================================================================
 * Undefined FIQs
 *-----------------------------------------------------------------------------
 * Enter in FIQ mode, spsr = ANY CPSR, lr = ANY PC
 * MUST PRESERVE SVC SPSR, but need to switch to SVC mode to show our msg.
 * Basically to switch modes, we *HAVE* to clobber one register...  brain
 * damage alert!  I don't think that we can execute any code in here in any
 * other mode than FIQ...  Ok you can switch to another mode, but you can't
 * get out of that mode without clobbering one register.
 */
vector_FIQ:     disable_fiq
                subs    pc, lr, #4

/*=============================================================================
 * Address exception handler
 *-----------------------------------------------------------------------------
 * These aren't too critical.
 * (they're not supposed to happen, and won't happen in 32-bit data mode).
 */

vector_addrexcptn:
                b       vector_addrexcptn

/*
 * We group all the following data together to optimise
 * for CPUs with separate I & D caches.
 */
                .align  5

.LCvswi:        .word   vector_swi

.LCsirq:        .word   __temp_irq
.LCsund:        .word   __temp_und
.LCsabt:        .word   __temp_abt

__stubs_end:

                .equ    __real_stubs_start, .LCvectors + 0x200

.LCvectors:     swi     SYS_ERROR0
                b       __real_stubs_start + (vector_undefinstr - __stubs_start)
                ldr     pc, __real_stubs_start + (.LCvswi - __stubs_start)
                b       __real_stubs_start + (vector_prefetch - __stubs_start)
                b       __real_stubs_start + (vector_data - __stubs_start)
                b       __real_stubs_start + (vector_addrexcptn - __stubs_start)
                b       __real_stubs_start + (vector_IRQ - __stubs_start)
                b       __real_stubs_start + (vector_FIQ - __stubs_start)

ENTRY(__trap_init)
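                @ r0 = address at which the vectors are installed; the stubs
                @ are copied to r0 + 0x200 (see __real_stubs_start above)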
                stmfd   sp!, {r4 - r6, lr}

                adr     r1, .LCvectors                  @ set up the vectors
                ldmia   r1, {r1, r2, r3, r4, r5, r6, ip, lr}
                stmia   r0, {r1, r2, r3, r4, r5, r6, ip, lr}

                add     r2, r0, #0x200
                adr     r0, __stubs_start               @ copy stubs to 0x200
                adr     r1, __stubs_end
1:              ldr     r3, [r0], #4
                str     r3, [r2], #4
                cmp     r0, r1
                blt     1b
                LOADREGS(fd, sp!, {r4 - r6, pc})

                .data

/*
 * Do not reorder these, and do not insert extra data between...
 */

__temp_irq:     .word   0                               @ saved lr_irq
                .word   0                               @ saved spsr_irq
                .word   -1                              @ old_r0
__temp_und:     .word   0                               @ Saved lr_und
                .word   0                               @ Saved spsr_und
                .word   -1                              @ old_r0
__temp_abt:     .word   0                               @ Saved lr_abt
                .word   0                               @ Saved spsr_abt
                .word   -1                              @ old_r0

                .globl  SYMBOL_NAME(cr_alignment)
                .globl  SYMBOL_NAME(cr_no_alignment)
SYMBOL_NAME(cr_alignment):
                .space  4
SYMBOL_NAME(cr_no_alignment):
                .space  4