2 * linux/arch/arm/kernel/entry-armv.S
4 * Copyright (C) 1996,1997,1998 Russell King.
5 * ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk)
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
11 * Low-level vector interface routines
13 * Note: there is a StrongARM bug in the STMIA rn, {regs}^ instruction that causes
14 * it to save wrong values... Be aware!
16 #include <linux/config.h>
17 #include <linux/init.h>
19 #include <asm/thread_info.h>
21 #include <asm/ptrace.h>
22 #include <asm/vfpmacros.h>
24 #include "entry-header.S"
@ ---------------------------------------------------------------------------
@ Acorn IOC/IOMD hardware: FIQ-disable sequence and the IRQ-decode macro.
@ NOTE(review): this file is a partial extract of entry-armv.S; interior
@ source lines (including the closing .endm directives) are missing from
@ this view -- confirm against the full file before editing.
27 /* IOC / IOMD based hardware */
28 #include <asm/hardware/iomd.h>
@ IOC_BASE is split into two halves so each half is encodable as an 8-bit
@ rotated ARM immediate; the full base address is then built with mov+orr.
30 .equ ioc_base_high, IOC_BASE & 0xff000000
31 .equ ioc_base_low, IOC_BASE & 0x00ff0000
33 mov r12, #ioc_base_high
35 orr r12, r12, #ioc_base_low
37 strb r12, [r12, #0x38] @ Disable FIQ register
@ Decode a pending IRQ: read the IOMD request registers in priority order
@ (IRQREQB, DMAREQ, IRQREQA, IRQREQC, IRQREQD); on each miss advance \base
@ by 256 to the next irq_prio_* table below, then index the final status
@ byte into that table to obtain the Linux IRQ number.
40 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
41 mov r4, #ioc_base_high @ point at IOC
43 orr r4, r4, #ioc_base_low
45 ldrb \irqstat, [r4, #IOMD_IRQREQB] @ get high priority first
46 ldr \base, =irq_prio_h
49 ldreqb \irqstat, [r4, #IOMD_DMAREQ] @ get dma
50 addeq \base, \base, #256 @ irq_prio_h table size
54 ldreqb \irqstat, [r4, #IOMD_IRQREQA] @ get low priority
55 addeq \base, \base, #256 @ irq_prio_d table size
58 ldreqb \irqstat, [r4, #IOMD_IRQREQC]
59 addeq \base, \base, #256 @ irq_prio_l table size
63 ldreqb \irqstat, [r4, #IOMD_IRQREQD]
64 addeq \base, \base, #256 @ irq_prio_lc table size
67 2406: ldrneb \irqnr, [\base, \irqstat] @ get IRQ number
71 * Interrupt table (incorporates priority). Please note that we
72 * rely on the order of these tables (see above code).
@ Each table is a 256-byte lookup: the index is the raw 8-bit request-
@ register value, the entry is the highest-priority pending IRQ number.
@ The decode macro above steps \base through these tables in 256-byte
@ increments, so their order and size must not change.
@
@ irq_prio_h: high-priority sources (IRQs 8..15, 0 = none pending).
75 irq_prio_h: .byte 0, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
76 .byte 12, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
77 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
78 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
79 .byte 14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
80 .byte 14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
81 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
82 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
83 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
84 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
85 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
86 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
87 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
88 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
89 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
90 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
@ irq_prio_d: DMA request sources (IRQs 16..23).
92 irq_prio_d: .byte 0,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
93 .byte 20,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
94 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
95 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
96 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
97 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
98 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
99 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
100 .byte 23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
101 .byte 23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
102 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
103 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
104 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
105 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
106 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
107 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
@ irq_prio_l: low-priority sources (IRQs 0..7).
109 irq_prio_l: .byte 0, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
110 .byte 4, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
111 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
112 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
113 .byte 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
114 .byte 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
115 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
116 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
117 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
118 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
119 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
120 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
121 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
122 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
123 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
124 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
@ irq_prio_lc: same layout shifted to IRQs 24..31.
126 irq_prio_lc: .byte 24,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
127 .byte 28,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
128 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
129 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
130 .byte 30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
131 .byte 30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
132 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
133 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
134 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
135 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
136 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
137 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
138 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
139 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
140 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
141 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
@ irq_prio_ld: same layout shifted to IRQs 40..47.
144 irq_prio_ld: .byte 40,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
145 .byte 44,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
146 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
147 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
148 .byte 46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
149 .byte 46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
150 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
151 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
152 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
153 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
154 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
155 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
156 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
157 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
158 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
159 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
@ EBSA110: read the 8-bit interrupt status byte and binary-search for the
@ lowest set bit; each addeq/moveq pair halves the remaining search space.
@ NOTE(review): the tst instructions between the steps are missing from
@ this extract -- confirm against the full file.
163 #elif defined(CONFIG_ARCH_EBSA110)
165 #define IRQ_STAT 0xff000000 /* read */
170 .macro get_irqnr_and_base, irqnr, stat, base, tmp
172 ldrb \stat, [\base] @ get interrupts
175 addeq \irqnr, \irqnr, #4
176 moveq \stat, \stat, lsr #4
178 addeq \irqnr, \irqnr, #2
179 moveq \stat, \stat, lsr #2
181 addeq \irqnr, \irqnr, #1
182 moveq \stat, \stat, lsr #1
183 tst \stat, #1 @ bit 0 should be set
@ No priority table needed on this machine; macro body is empty.
186 .macro irq_prio_table
@ Shark: poll the two cascaded 8259-style PICs (master at I/O 0x20, slave
@ at 0xA0).  The poll response has bit 7 set when an IRQ is pending and
@ its number in bits 0-2; slave IRQs are offset by 8.
189 #elif defined(CONFIG_ARCH_SHARK)
194 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
198 strb \irqstat, [r4, #0x20] @outb(0x0C, 0x20) /* Poll command */
199 ldrb \irqnr, [r4, #0x20] @irq = inb(0x20) & 7
200 and \irqstat, \irqnr, #0x80
203 and \irqnr, \irqnr, #7
206 43: mov \irqstat, #0x0C
207 strb \irqstat, [r4, #0xa0] @outb(0x0C, 0xA0) /* Poll command */
208 ldrb \irqnr, [r4, #0xa0] @irq = (inb(0xA0) & 7) + 8
209 and \irqstat, \irqnr, #0x80
212 and \irqnr, \irqnr, #7
213 add \irqnr, \irqnr, #8
217 .macro irq_prio_table
@ Footbridge (DC21285): read the 32-bit IRQ status register at
@ ARMCSR_BASE + 0x180 and test the sources one by one; movne selects the
@ IRQ number on a match.  NOTE(review): the conditional branches that
@ normally follow each movne are not visible in this extract.
220 #elif defined(CONFIG_FOOTBRIDGE)
221 #include <asm/hardware/dec21285.h>
@ Split ARMCSR_BASE so each half fits an 8-bit rotated ARM immediate.
226 .equ dc21285_high, ARMCSR_BASE & 0xff000000
227 .equ dc21285_low, ARMCSR_BASE & 0x00ffffff
229 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
230 mov r4, #dc21285_high
232 orr r4, r4, #dc21285_low
234 ldr \irqstat, [r4, #0x180] @ get interrupts
236 mov \irqnr, #IRQ_SDRAMPARITY
237 tst \irqstat, #IRQ_MASK_SDRAMPARITY
240 tst \irqstat, #IRQ_MASK_UART_RX
241 movne \irqnr, #IRQ_CONRX
244 tst \irqstat, #IRQ_MASK_DMA1
245 movne \irqnr, #IRQ_DMA1
248 tst \irqstat, #IRQ_MASK_DMA2
249 movne \irqnr, #IRQ_DMA2
252 tst \irqstat, #IRQ_MASK_IN0
253 movne \irqnr, #IRQ_IN0
256 tst \irqstat, #IRQ_MASK_IN1
257 movne \irqnr, #IRQ_IN1
260 tst \irqstat, #IRQ_MASK_IN2
261 movne \irqnr, #IRQ_IN2
264 tst \irqstat, #IRQ_MASK_IN3
265 movne \irqnr, #IRQ_IN3
268 tst \irqstat, #IRQ_MASK_PCI
269 movne \irqnr, #IRQ_PCI
272 tst \irqstat, #IRQ_MASK_DOORBELLHOST
273 movne \irqnr, #IRQ_DOORBELLHOST
276 tst \irqstat, #IRQ_MASK_I2OINPOST
277 movne \irqnr, #IRQ_I2OINPOST
280 tst \irqstat, #IRQ_MASK_TIMER1
281 movne \irqnr, #IRQ_TIMER1
284 tst \irqstat, #IRQ_MASK_TIMER2
285 movne \irqnr, #IRQ_TIMER2
288 tst \irqstat, #IRQ_MASK_TIMER3
289 movne \irqnr, #IRQ_TIMER3
292 tst \irqstat, #IRQ_MASK_UART_TX
293 movne \irqnr, #IRQ_CONTX
296 tst \irqstat, #IRQ_MASK_PCI_ABORT
297 movne \irqnr, #IRQ_PCI_ABORT
300 tst \irqstat, #IRQ_MASK_PCI_SERR
301 movne \irqnr, #IRQ_PCI_SERR
304 tst \irqstat, #IRQ_MASK_DISCARD_TIMER
305 movne \irqnr, #IRQ_DISCARD_TIMER
308 tst \irqstat, #IRQ_MASK_PCI_DPERR
309 movne \irqnr, #IRQ_PCI_DPERR
312 tst \irqstat, #IRQ_MASK_PCI_PERR
313 movne \irqnr, #IRQ_PCI_PERR
317 .macro irq_prio_table
@ NexusPCI: read the interrupt controller, AND with the software enable
@ mask (soft_irq_mask), then shift right one bit at a time counting until
@ bit 0 is set.
320 #elif defined(CONFIG_ARCH_NEXUSPCI)
325 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
326 ldr \irqstat, =INTCONT_BASE
327 ldr \base, =soft_irq_mask
328 ldr \irqstat, [\irqstat] @ get interrupts
331 and \irqstat, \irqstat, \base @ mask out disabled ones
332 1001: tst \irqstat, #1
333 addeq \irqnr, \irqnr, #1
334 moveq \irqstat, \irqstat, lsr #1
340 .macro irq_prio_table
@ TBOX: identical shift-and-count decode, status register at 0xffff7000.
348 #elif defined(CONFIG_ARCH_TBOX)
353 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
354 ldr \irqstat, =0xffff7000
355 ldr \irqstat, [\irqstat] @ get interrupts
356 ldr \base, =soft_irq_mask
359 and \irqstat, \irqstat, \base @ mask out disabled ones
360 1001: tst \irqstat, #1
361 addeq \irqnr, \irqnr, #1
362 moveq \irqstat, \irqstat, lsr #1
368 .macro irq_prio_table
@ SA1100: pending = ICIP & ICMR, then a binary search for the lowest set
@ bit (byte steps of 8, then 4, 2, 1).  NOTE(review): the tsteq lines for
@ the 4/2/1 steps are missing from this extract.
376 #elif defined(CONFIG_ARCH_SA1100)
381 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
382 mov r4, #0xfa000000 @ ICIP = 0xfa050000
383 add r4, r4, #0x00050000
384 ldr \irqstat, [r4] @ get irqs
385 ldr \irqnr, [r4, #4] @ ICMR = 0xfa050004
386 ands \irqstat, \irqstat, \irqnr
390 moveq \irqstat, \irqstat, lsr #8
391 addeq \irqnr, \irqnr, #8
392 tsteq \irqstat, #0xff
393 moveq \irqstat, \irqstat, lsr #8
394 addeq \irqnr, \irqnr, #8
395 tsteq \irqstat, #0xff
396 moveq \irqstat, \irqstat, lsr #8
397 addeq \irqnr, \irqnr, #8
399 moveq \irqstat, \irqstat, lsr #4
400 addeq \irqnr, \irqnr, #4
402 moveq \irqstat, \irqstat, lsr #2
403 addeq \irqnr, \irqnr, #2
405 addeqs \irqnr, \irqnr, #1
409 .macro irq_prio_table
@ L7200: read the status register at IO_BASE_2 + 0x1000 and count bits.
412 #elif defined(CONFIG_ARCH_L7200)
413 #include <asm/hardware.h>
415 .equ irq_base_addr, IO_BASE_2
420 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
421 mov \irqstat, #irq_base_addr @ Virt addr IRQ regs
422 add \irqstat, \irqstat, #0x00001000 @ Status reg
423 ldr \irqstat, [\irqstat, #0] @ get interrupts
425 1001: tst \irqstat, #1
426 addeq \irqnr, \irqnr, #1
427 moveq \irqstat, \irqstat, lsr #1
433 .macro irq_prio_table
@ Integrator: check the primary controller (PIC) first, fall back to the
@ header-card controller (CIC); then find the lowest set bit in nibble
@ steps followed by single-bit steps.
436 #elif defined(CONFIG_ARCH_INTEGRATOR)
441 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
442 /* FIXME: should not be using soo many LDRs here */
443 ldr \base, =IO_ADDRESS(INTEGRATOR_IC_BASE)
444 mov \irqnr, #IRQ_PIC_START
445 ldr \irqstat, [\base, #IRQ_STATUS] @ get masked status
446 ldr \base, =IO_ADDRESS(INTEGRATOR_HDR_BASE)
448 ldreq \irqstat, [\base, #(INTEGRATOR_HDR_IC_OFFSET+IRQ_STATUS)]
449 moveq \irqnr, #IRQ_CIC_START
451 1001: tst \irqstat, #15
453 add \irqnr, \irqnr, #4
454 movs \irqstat, \irqstat, lsr #4
456 1002: tst \irqstat, #1
458 add \irqnr, \irqnr, #1
459 movs \irqstat, \irqstat, lsr #1
461 1003: /* EQ will be set if no irqs pending */
464 .macro irq_prio_table
@ Versatile/PB: same nibble-then-bit scan on the VIC masked status.
467 #elif defined(CONFIG_ARCH_VERSATILE_PB)
472 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
473 ldr \base, =IO_ADDRESS(VERSATILE_VIC_BASE)
474 ldr \irqstat, [\base, #VIC_IRQ_STATUS] @ get masked status
479 1001: tst \irqstat, #15
481 add \irqnr, \irqnr, #4
482 movs \irqstat, \irqstat, lsr #4
484 1002: tst \irqstat, #1
486 add \irqnr, \irqnr, #1
487 movs \irqstat, \irqstat, lsr #1
489 1003: /* EQ will be set if no irqs pending */
@ Disabled alternative: use the CLZ instruction instead of the bit scan.
491 @ clz \irqnr, \irqstat
492 @1003: /* EQ will be set if we reach MAXIRQNUM */
495 .macro irq_prio_table
@ CLPS711X: two interrupt status/mask register pairs (INTSR1/INTMR1 and
@ INTSR2/INTMR2); mask the status with the 16 valid mask bits, then
@ binary-search the result for the lowest set bit.
498 #elif defined(CONFIG_ARCH_CLPS711X)
500 #include <asm/hardware/clps7111.h>
@ Compile-time sanity check: the code below indexes INTSR2/INTMR2 with a
@ single stride added to \base, so both pairs must share that stride.
505 #if (INTSR2 - INTSR1) != (INTMR2 - INTMR1)
506 #error INTSR stride != INTMR stride
509 .macro get_irqnr_and_base, irqnr, stat, base, mask
510 mov \base, #CLPS7111_BASE
511 ldr \stat, [\base, #INTSR1]
512 ldr \mask, [\base, #INTMR1]
@ Keep only the low 16 bits of the mask (lsl then lsr-by-16 on use).
514 mov \mask, \mask, lsl #16
515 and \stat, \stat, \mask, lsr #16
516 movs \stat, \stat, lsr #4
@ Second register pair, reached via the common stride.
519 add \base, \base, #INTSR2 - INTSR1
520 ldr \stat, [\base, #INTSR1]
521 ldr \mask, [\base, #INTMR1]
523 mov \mask, \mask, lsl #16
524 and \stat, \stat, \mask, lsr #16
526 1001: tst \stat, #255
527 addeq \irqnr, \irqnr, #8
528 moveq \stat, \stat, lsr #8
530 addeq \irqnr, \irqnr, #4
531 moveq \stat, \stat, lsr #4
533 addeq \irqnr, \irqnr, #2
534 moveq \stat, \stat, lsr #2
536 addeq \irqnr, \irqnr, #1
537 moveq \stat, \stat, lsr #1
538 tst \stat, #1 @ bit 0 should be set
541 .macro irq_prio_table
@ Camelot (Epxa10db): the controller supplies the IRQ number directly in
@ its INT_ID register; 0 means none, so subtract 1 on a hit.
544 #elif defined (CONFIG_ARCH_CAMELOT)
545 #include <asm/arch/platform.h>
546 #undef IRQ_MODE /* same name defined in asm/proc/ptrace.h */
547 #include <asm/arch/int_ctrl00.h>
552 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
554 ldr \irqstat, =INT_ID(IO_ADDRESS(EXC_INT_CTRL00_BASE))
555 ldr \irqnr,[\irqstat]
557 subne \irqnr,\irqnr,#1
562 .macro irq_prio_table
@ IOP310 (XScale 80200): interrupt sources live in coprocessor 13; each
@ source fires only if both its INTSRC status bit and the corresponding
@ INTCTL enable bit are set (tst + tstne pairs).
565 #elif defined(CONFIG_ARCH_IOP310)
570 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
571 mrc p13, 0, \irqstat, c4, c0, 0 @ get INTSRC
572 mrc p13, 0, \base, c0, c0, 0 @ get INTCTL
574 tst \irqstat, #(1<<29) @ if INTSRC_BI
575 tstne \base, #(1<<3) @ and INTCTL_BM
576 movne \irqnr, #IRQ_XS80200_BCU
579 tst \irqstat, #(1<<28) @ if INTSRC_PI
580 tstne \base, #(1<<2) @ and INTCTL_PM
581 movne \irqnr, #IRQ_XS80200_PMU
584 tst \irqstat, #(1<<31) @ if INTSRC_FI
585 tstne \base, #(1<<0) @ and INTCTL_FM
586 movne \irqnr, #IRQ_XS80200_EXTFIQ
589 tst \irqstat, #(1<<30) @ if INTSRC_II
590 tstne \base, #(1<<1) @ and INTCTL_IM
591 movne \irqnr, #IRQ_XS80200_EXTIRQ
596 .macro irq_prio_table
@ IOP321: IINTSRC is read from coprocessor 6; the IRQ number is derived
@ by subtraction and rebased at IRQ_IOP321_DMA0_EOT.
599 #elif defined(CONFIG_ARCH_IOP321)
604 * Note: only deal with normal interrupts, not FIQ
606 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
608 mrc p6, 0, \irqstat, c8, c0, 0 @ Read IINTSRC
613 subs \irqnr,\base,\irqnr
614 add \irqnr,\irqnr,#IRQ_IOP321_DMA0_EOT
618 .macro irq_prio_table
@ PXA: pending = ICIP & ICMR (via cp6 or memory-mapped registers); then
@ isolate the lowest set bit with the classic x & -x trick and convert
@ the bit position to an IRQ number with rsb.
@ NOTE(review): the #ifdef selecting between the mrc and ldr register
@ access paths is not visible in this extract.
621 #elif defined(CONFIG_ARCH_PXA)
626 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
628 mrc p6, 0, \irqstat, c0, c0, 0 @ ICIP
629 mrc p6, 0, \irqnr, c1, c0, 0 @ ICMR
631 mov \base, #io_p2v(0x40000000) @ IIR Ctl = 0x40d00000
632 add \base, \base, #0x00d00000
633 ldr \irqstat, [\base, #0] @ ICIP
634 ldr \irqnr, [\base, #4] @ ICMR
636 ands \irqnr, \irqstat, \irqnr
638 rsb \irqstat, \irqnr, #0
639 and \irqstat, \irqstat, \irqnr @ isolate lowest set bit
641 rsb \irqnr, \irqnr, #(31 - PXA_IRQ_SKIP)
645 .macro irq_prio_table
@ IXP4XX: read the pending register, then shift-and-count for the lowest
@ set bit.
648 #elif defined (CONFIG_ARCH_IXP4XX)
653 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
654 ldr \irqstat, =(IXP4XX_INTC_BASE_VIRT+IXP4XX_ICIP_OFFSET)
655 ldr \irqstat, [\irqstat] @ get interrupts
660 subs \irqnr, \base, \irqnr
663 1001: tst \irqstat, #1
664 addeq \irqnr, \irqnr, #1
665 moveq \irqstat, \irqstat, lsr #1
673 .macro irq_prio_table
@ OMAP: level-1 handler (IH1) reports the IRQ number in its SIR register;
@ if it is the cascade interrupt (INT_IH2_IRQ), re-read from the level-2
@ handler (IH2) and offset by 32.
676 #elif defined(CONFIG_ARCH_OMAP)
681 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
682 ldr \base, =IO_ADDRESS(OMAP_IH1_BASE)
683 ldr \irqnr, [\base, #IRQ_ITR_REG_OFFSET]
684 ldr \tmp, [\base, #IRQ_MIR_REG_OFFSET]
685 mov \irqstat, #0xffffffff
686 bic \tmp, \irqstat, \tmp @ \tmp = ~MIR (enabled sources)
690 ldr \irqnr, [\base, #IRQ_SIR_FIQ_REG_OFFSET]
692 ldreq \irqnr, [\base, #IRQ_SIR_IRQ_REG_OFFSET]
693 cmpeq \irqnr, #INT_IH2_IRQ
694 ldreq \base, =IO_ADDRESS(OMAP_IH2_BASE)
695 ldreq \irqnr, [\base, #IRQ_SIR_IRQ_REG_OFFSET]
696 addeqs \irqnr, \irqnr, #32
700 .macro irq_prio_table
@ S3C2410: the controller reports an interrupt offset in INTOFFSET
@ (reg 0x14).  The handler cross-checks it against INTPND (0x10), falls
@ back to scanning INTPND by hand when they disagree (printing a debug
@ message), and has separate paths for the external-interrupt register
@ (EXTINTPEND in the GPIO block) and the LCD controller sub-sources.
@ NOTE(review): many branch and label lines of this handler are missing
@ from this extract; treat the visible flow as indicative only.
703 #elif defined(CONFIG_ARCH_S3C2410)
704 /* S3C2410X IRQ Handler, <ben@simtec.co.uk> */
706 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
709 mov \tmp, #S3C2410_VA_IRQ
710 ldr \irqnr, [ \tmp, #0x14 ] @ get irq no
713 beq 1002f @ external irq reg
715 beq 1003f @ lcd controller
717 @ debug check to see if interrupt reported is the same
@ Verify INTOFFSET against INTPND: shift the pending bits down by the
@ reported offset; bit 0 should then be set.
722 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
723 mov \irqstat, \irqstat, lsr \irqnr
@ Mismatch path: save scratch registers and print a diagnostic with the
@ raw INTOFFSET / INTPND / SRCPND values.
728 stmfd r13!, { r0 - r4 , r14 }
729 ldr r1, [ \tmp, #0x14 ] @ intoffset
730 ldr r2, [ \tmp, #0x10 ] @ INTPND
731 ldr r3, [ \tmp, #0x00 ] @ SRCPND
737 .ascii "<7>irq: err - bad offset %d, intpnd=%08x, srcpnd=%08x\n"
742 mov \tmp, #S3C2410_VA_IRQ
743 ldmfd r13!, { r0 - r4 , r14 }
745 @ try working out interript number for ourselves
@ Manual fallback: scan INTPND bit by bit for the lowest pending source.
747 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
749 movs \irqstat, \irqstat, lsr#1
750 bcs 30000b @ try and re-start the proccess
751 add \irqnr, \irqnr, #1
755 @ found no interrupt, set Z flag and leave
761 @ we base the s3c2410x interrupts at 16 and above to allow
762 @ isa peripherals to have their standard interrupts, also
763 @ ensure that Z flag is un-set on exit
765 @ note, we cannot be sure if we get IRQ_EINT0 (0) that
766 @ there is simply no interrupt pending, so in all other
767 @ cases we jump to say we have found something, otherwise
768 @ we check to see if the interrupt really is assrted
769 adds \irqnr, \irqnr, #IRQ_EINT0
770 teq \irqnr, #IRQ_EINT0
772 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
777 @ we get here from no main or external interrupts pending
@ External-interrupt path: read EXTINTPEND/EXTINTMASK from the GPIO
@ block, drop masked and low-4 bits, then scan from IRQ_EINT4 upward.
779 add \tmp, \tmp, #S3C2410_VA_GPIO - S3C2410_VA_IRQ
780 ldr \irqstat, [ \tmp, # 0xa8 ] @ EXTINTPEND
781 ldr \irqnr, [ \tmp, # 0xa4 ] @ EXTINTMASK
783 bic \irqstat, \irqstat, \irqnr @ clear masked irqs
785 mov \irqnr, #IRQ_EINT4 @ start extint nos
786 mov \irqstat, \irqstat, lsr#4 @ ignore bottom 4 bits
788 movs \irqstat, \irqstat, lsr#1
790 add \irqnr, \irqnr, #1
791 cmp \irqnr, #IRQ_EINT23
794 @ found no interrupt, set Z flag and leave
799 @ lcd interrupt has been asserted...
@ LCD path: distinguish frame vs FIFO interrupts from the LCD pending reg.
800 add \tmp, \tmp, #S3C2410_VA_LCD - S3C2410_VA_IRQ
801 ldr \irqstat, [ \tmp, # 0x54 ] @ lcd int pending
804 movne \irqnr, #IRQ_LCD_FRAME
806 movne \irqnr, #IRQ_LCD_FIFO
808 @ fall through to exit with flags updated
810 1004: @ ensure Z flag clear in case our MOVS shifted out the last bit
817 /* currently don't need an disable_fiq macro */
822 /* we don't have an irq priority table */
823 .macro irq_prio_table
@ LH7A400: single PIC; shift INTSR into carry one bit at a time until a
@ set bit is found.  Mutually exclusive with LH7A404.
826 #elif defined(CONFIG_ARCH_LH7A400)
828 # if defined (CONFIG_ARCH_LH7A404)
829 # error "LH7A400 and LH7A404 are mutually exclusive"
834 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
836 mov \base, #io_p2v(0x80000000) @ APB registers
837 ldr \irqstat, [\base, #0x500] @ PIC INTSR
839 1001: movs \irqstat, \irqstat, lsr #1 @ Shift into carry
840 bcs 1008f @ Bit set; irq found
841 add \irqnr, \irqnr, #1
842 bne 1001b @ Until no bits
843 b 1009f @ Nothing? Hmm.
844 1008: movs \irqstat, #1 @ Force !Z
848 .macro irq_prio_table
@ LH7A404: two cascaded VICs with optional vectored dispatch; check
@ VECTADDR first, else scan VIC1 then VIC2 (IRQs 32+) status bits, and
@ always write VECTADDR back to clear the vector.
851 #elif defined(CONFIG_ARCH_LH7A404)
856 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
857 mov \irqnr, #0 @ VIC1 irq base
858 mov \base, #io_p2v(0x80000000) @ APB registers
859 add \base, \base, #0x8000
860 ldr \tmp, [\base, #0x0030] @ VIC1_VECTADDR
861 tst \tmp, #VA_VECTORED @ Direct vectored
863 tst \tmp, #VA_VIC1DEFAULT @ Default vectored VIC1
864 ldrne \irqstat, [\base, #0] @ VIC1_IRQSTATUS
866 add \base, \base, #(0xa000 - 0x8000) @ switch to VIC2
867 ldr \tmp, [\base, #0x0030] @ VIC2_VECTADDR
868 tst \tmp, #VA_VECTORED @ Direct vectored
870 ldr \irqstat, [\base, #0] @ VIC2_IRQSTATUS
871 mov \irqnr, #32 @ VIC2 irq base
873 1001: movs \irqstat, \irqstat, lsr #1 @ Shift into carry
874 bcs 1008f @ Bit set; irq found
875 add \irqnr, \irqnr, #1
876 bne 1001b @ Until no bits
877 b 1009f @ Nothing? Hmm.
878 1002: and \irqnr, \tmp, #0x3f @ Mask for valid bits
879 1008: movs \irqstat, #1 @ Force !Z
880 str \tmp, [\base, #0x0030] @ Clear vector
884 .macro irq_prio_table
@ No machine matched: the build must fail rather than silently omit the
@ IRQ decode macros.
888 #error Unknown architecture
@ ---------------------------------------------------------------------------
@ Invalid-mode handlers: entered when an exception arrives from a CPU mode
@ we do not expect (see the vector dispatch tables below).  Each saves a
@ register frame and records a BAD_* reason code.
@ NOTE(review): lines are missing between handlers in this extract.
892 * Invalid mode handlers
894 __pabt_invalid: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
895 stmia sp, {r0 - lr} @ Save XXX r0 - lr
897 mov r1, #BAD_PREFETCH
900 __dabt_invalid: sub sp, sp, #S_FRAME_SIZE
901 stmia sp, {r0 - lr} @ Save SVC r0 - lr [lr *should* be intact]
906 __irq_invalid: sub sp, sp, #S_FRAME_SIZE @ Allocate space on stack for frame
907 stmfd sp, {r0 - lr} @ Save r0 - lr
912 __und_invalid: sub sp, sp, #S_FRAME_SIZE
915 mov r1, #BAD_UNDEFINSTR @ int reason
918 ldmia r4, {r5 - r7} @ Get XXX pc, cpsr, old_r0
920 stmia r4, {r5 - r7} @ Save XXX pc, cpsr, old_r0
922 and r2, r6, #31 @ int mode
@ Data abort taken while in SVC mode: build a pt_regs frame on the SVC
@ stack, re-enable IRQs if they were enabled at abort time, and call the
@ processor-specific abort decoder through the `processor` vector.
929 __dabt_svc: sub sp, sp, #S_FRAME_SIZE
930 stmia sp, {r0 - r12} @ save r0 - r12
932 add r0, sp, #S_FRAME_SIZE @ r0 = sp_SVC at exception time
933 ldmia r2, {r2 - r4} @ get pc, cpsr
936 stmia r5, {r0 - r4} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
937 mrs r9, cpsr @ Enable interrupts if they were
939 biceq r9, r9, #PSR_I_BIT @ previously
941 * This routine must not corrupt r9
944 ldr r4, .LCprocfns @ pass r2, r3 to
945 mov lr, pc @ processor code
946 ldr pc, [r4] @ call processor specific code
956 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ IRQ taken while in SVC mode: save a frame, bump the preempt count while
@ handling (CONFIG_PREEMPT), loop on get_irqnr_and_base dispatching each
@ pending IRQ, then restore and return with ldmia ^.
959 __irq_svc: sub sp, sp, #S_FRAME_SIZE
960 stmia sp, {r0 - r12} @ save r0 - r12
962 add r5, sp, #S_FRAME_SIZE
966 stmia r4, {r5, r6, r7, r8, r9} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
967 #ifdef CONFIG_PREEMPT
969 ldr r9, [r8, #TI_PREEMPT] @ get preempt count
970 add r7, r9, #1 @ increment it
971 str r7, [r8, #TI_PREEMPT]
973 1: get_irqnr_and_base r0, r6, r5, lr
976 @ routine called with r0 = irq number, r1 = struct pt_regs *
980 #ifdef CONFIG_PREEMPT
981 ldr r0, [r8, #TI_FLAGS] @ get flags
982 tst r0, #_TIF_NEED_RESCHED
985 ldr r0, [r8, #TI_PREEMPT] @ read preempt value
987 str r9, [r8, #TI_PREEMPT] @ restore preempt count
@ Deliberate trap: storing to [r0, -r0] (address 0) faults if the preempt
@ count is inconsistent.
988 strne r0, [r0, -r0] @ bug()
990 ldr r0, [sp, #S_PSR] @ irqs are already disabled
992 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ Kernel preemption from IRQ context: only reschedule when not inside a
@ nested interrupt/bottom half, with PREEMPT_ACTIVE held in TI_PREEMPT.
996 #ifdef CONFIG_PREEMPT
997 svc_preempt: teq r9, #0 @ was preempt count = 0
998 ldreq r6, .LCirq_stat
1000 ldr r0, [r6, #4] @ local_irq_count
1001 ldr r1, [r6, #8] @ local_bh_count
1004 mov r7, #PREEMPT_ACTIVE
1005 str r7, [r8, #TI_PREEMPT] @ set PREEMPT_ACTIVE
1006 1: enable_irq r2 @ enable IRQs
1008 disable_irq r0 @ disable IRQs
1009 ldr r0, [r8, #TI_FLAGS] @ get new tasks TI_FLAGS
1010 tst r0, #_TIF_NEED_RESCHED
1011 beq preempt_return @ go again
@ Undefined instruction in SVC mode: fetch the faulting instruction and
@ hand it to call_fpe; if no coprocessor claims it, fall through to the
@ undefined-instruction handler.
1016 __und_svc: sub sp, sp, #S_FRAME_SIZE
1017 stmia sp, {r0 - r12} @ save r0 - r12
1021 add r3, sp, #S_FRAME_SIZE
1023 stmia r2, {r3 - r7} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
1025 ldr r0, [r5, #-4] @ r0 = instruction
1026 adrsvc al, r9, 1f @ r9 = normal FP return
1027 bl call_fpe @ lr = undefined instr return
1029 mov r0, sp @ struct pt_regs *regs
1033 ldr lr, [sp, #S_PSR] @ Get SVC cpsr
1035 ldmia sp, {r0 - pc}^ @ Restore SVC registers
@ Prefetch abort in SVC mode: same frame-building pattern as __dabt_svc,
@ then call the common prefetch-abort handler.
1038 __pabt_svc: sub sp, sp, #S_FRAME_SIZE
1039 stmia sp, {r0 - r12} @ save r0 - r12
1041 add r0, sp, #S_FRAME_SIZE
1042 ldmia r2, {r2 - r4} @ get pc, cpsr
1045 stmia r5, {r0 - r4} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
1046 mrs r9, cpsr @ Enable interrupts if they were
1048 biceq r9, r9, #PSR_I_BIT @ previously
1050 mov r0, r2 @ address (pc)
1052 bl do_PrefetchAbort @ call abort handler
1054 ldr r0, [sp, #S_PSR]
1056 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ Literal pool: addresses of the per-mode temporary save areas and of the
@ processor/irq_stat structures, reachable with pc-relative ldr.
1059 .LCirq: .word __temp_irq
1060 .LCund: .word __temp_und
1061 .LCabt: .word __temp_abt
1063 .LCprocfns: .word processor
1065 .LCfp: .word fp_enter
1066 #ifdef CONFIG_PREEMPT
1067 .LCirq_stat: .word irq_stat
@ ---------------------------------------------------------------------------
@ User-mode exception handlers: same structure as the SVC variants but
@ the saved pc/cpsr come from the __temp_* scratch areas filled by the
@ vector stubs, and user sp/lr are saved with the ^ (user-bank) forms.
@ NOTE(review): lines are missing inside each handler in this extract.
1073 * User mode handlers
1076 __dabt_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1077 stmia sp, {r0 - r12} @ save r0 - r12
1080 ldmia r7, {r2 - r4} @ Get USR pc, cpsr
1081 stmia r5, {r2 - r4} @ Save USR pc, cpsr, old_r0
1083 alignment_trap r7, r7, __temp_abt
1086 ldr r4, .LCprocfns @ pass r2, r3 to
1087 mov lr, pc @ processor code
1088 ldr pc, [r4] @ call processor specific code
1090 bl CPU_ABORT_HANDLER
1092 enable_irq r2 @ Enable interrupts
1094 adrsvc al, lr, ret_from_exception @ tail into common return path
@ IRQ from user mode: build the frame, loop dispatching pending IRQs via
@ get_irqnr_and_base, managing TI_PREEMPT around the loop when
@ CONFIG_PREEMPT is enabled.
1098 __irq_usr: sub sp, sp, #S_FRAME_SIZE
1099 stmia sp, {r0 - r12} @ save r0 - r12
1102 ldmia r4, {r5 - r7} @ get saved PC, SPSR
1103 stmia r8, {r5 - r7} @ save pc, psr, old_r0
1105 alignment_trap r4, r7, __temp_irq
1107 #ifdef CONFIG_PREEMPT
1109 ldr r9, [r8, #TI_PREEMPT] @ get preempt count
1110 add r7, r9, #1 @ increment it
1111 str r7, [r8, #TI_PREEMPT]
1113 1: get_irqnr_and_base r0, r6, r5, lr
1117 @ routine called with r0 = irq number, r1 = struct pt_regs *
1120 #ifdef CONFIG_PREEMPT
1121 ldr r0, [r8, #TI_PREEMPT]
1123 str r9, [r8, #TI_PREEMPT] @ restore previous count
@ Undefined instruction from user mode: fetch the instruction with ldrt
@ (user-mode translation, may fault -- see the .fixup entry), then let
@ call_fpe offer it to coprocessor emulation before signalling the task.
1135 __und_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1136 stmia sp, {r0 - r12} @ Save r0 - r12
1140 stmia r8, {r5 - r7} @ Save USR pc, cpsr, old_r0
1141 stmdb r8, {sp, lr}^ @ Save user sp, lr
1142 alignment_trap r4, r7, __temp_und
1144 tst r6, #PSR_T_BIT @ Thumb mode?
1145 bne fpundefinstr @ ignore FP
1147 1: ldrt r0, [r4] @ r0 = instruction
1148 adrsvc al, r9, ret_from_exception @ r9 = normal FP return
1149 adrsvc al, lr, fpundefinstr @ lr = undefined instr return
1152 * The out of line fixup for the ldrt above.
1154 .section .fixup, "ax"
1157 .section __ex_table,"a"
@ call_fpe: decide whether the undefined instruction belongs to a
@ coprocessor (bit 27 set for CDP/CPRT/LDC/STC), mark used_cp[] for the
@ CP number, and jump through the per-CP dispatch table below.
1164 * Check whether the instruction is a co-processor instruction.
1165 * If yes, we need to call the relevant co-processor handler.
1167 * Note that we don't do a full check here for the co-processor
1168 * instructions; all instructions with bit 27 set are well
1169 * defined. The only instructions that should fault are the
1170 * co-processor instructions. However, we have to watch out
1171 * for the ARM6/ARM7 SWI bug.
1173 * Emulators may wish to make use of the following registers:
1174 * r0 - instruction opcode.
1175 * r10 - this threads thread_info structure.
1177 call_fpe: enable_irq r10 @ Enable interrupts
1178 tst r0, #0x08000000 @ only CDP/CPRT/LDC/STC have bit 27
1179 #if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
1180 and r8, r0, #0x0f000000 @ mask out op-code bits
1181 teqne r8, #0x0f000000 @ SWI (ARM6/7 bug)?
1184 get_thread_info r10 @ get current thread
1185 and r8, r0, #0x00000f00 @ mask out CP number
1187 add r6, r10, #TI_USED_CP
1188 strb r7, [r6, r8, lsr #8] @ set appropriate used_cp[]
@ Computed jump: pc advances by (CP number * 4) into the branch table.
1189 add pc, pc, r8, lsr #6
1193 b do_fpe @ CP#1 (FPE)
1194 b do_fpe @ CP#2 (FPE)
1203 b do_vfp @ CP#10 (VFP)
1204 b do_vfp @ CP#11 (VFP)
1206 mov pc, lr @ CP#10 (VFP)
1207 mov pc, lr @ CP#11 (VFP)
1211 mov pc, lr @ CP#14 (Debug)
1212 mov pc, lr @ CP#15 (Control)
@ Enter the FP emulator through the fp_enter vector with r10 pointing at
@ this thread's FP state.
1214 do_fpe: ldr r4, .LCfp
1215 add r10, r10, #TI_FPSTATE @ r10 = workspace
1216 ldr pc, [r4] @ Call FP module USR entry point
1219 * The FP module is called with these registers set:
1222 * r9 = normal "successful" return address
1223 * r10 = FP workspace
1224 * lr = unrecognised FP instruction return address
@ Nothing claimed the instruction: deliver SIGILL via do_undefinstr.
1232 fpundefinstr: mov r0, sp
1233 adrsvc al, lr, ret_from_exception
@ Prefetch abort from user mode: same pattern as __dabt_usr, calling the
@ common do_PrefetchAbort handler.
1237 __pabt_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1238 stmia sp, {r0 - r12} @ Save r0 - r12
1241 ldmia r4, {r5 - r7} @ Get USR pc, cpsr
1242 stmia r8, {r5 - r7} @ Save USR pc, cpsr, old_r0
1243 stmdb r8, {sp, lr}^ @ Save sp_usr lr_usr
1244 alignment_trap r4, r7, __temp_abt
1246 enable_irq r0 @ Enable interrupts
1247 mov r0, r5 @ address (pc)
1249 bl do_PrefetchAbort @ call abort handler
1254 ENTRY(ret_from_exception)
1260 * Register switch for ARMv3 and ARMv4 processors
1261 * r0 = previous task_struct, r1 = previous thread_info, r2 = next thread_info
1262 * previous and next are guaranteed not to be the same.
1265 add ip, r1, #TI_CPU_SAVE
1266 ldr r3, [r2, #TI_CPU_DOMAIN]!
1267 stmia ip, {r4 - sl, fp, sp, lr} @ Store most regs on stack
1268 mcr p15, 0, r3, c3, c0, 0 @ Set domain register
1270 @ Always disable VFP so we can lazily save/restore the old
1271 @ state. This occurs in the context of the previous thread.
1273 bic r4, r4, #FPEXC_ENABLE
1276 ldmib r2, {r4 - sl, fp, sp, pc} @ Load all regs saved previously
1280 * Vector stubs. NOTE that we only align 'vector_IRQ' to a cache line boundary,
1281 * and we rely on each stub being exactly 48 (1.5 cache lines) in size. This
1282 * means that we only ever load two cache lines for this code, or one if we're
1283 * lucky. We also copy this code to 0x200 so that we can use branches in the
1284 * vectors, rather than ldr's.
1289 * Interrupt dispatcher
1290 * Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
1293 @ save mode specific registers
1297 str lr, [r13] @ save lr_IRQ
1299 str lr, [r13, #4] @ save spsr_IRQ
1301 @ now branch to the relevant MODE handling routine
1304 bic r13, r13, #MODE_MASK
1305 orr r13, r13, #MODE_SVC
1306 msr spsr, r13 @ switch to SVC_32 mode
1309 ldr lr, [pc, lr, lsl #2]
1310 movs pc, lr @ Changes mode and branches
1312 .LCtab_irq: .word __irq_usr @ 0 (USR_26 / USR_32)
1313 .word __irq_invalid @ 1 (FIQ_26 / FIQ_32)
1314 .word __irq_invalid @ 2 (IRQ_26 / IRQ_32)
1315 .word __irq_svc @ 3 (SVC_26 / SVC_32)
1316 .word __irq_invalid @ 4
1317 .word __irq_invalid @ 5
1318 .word __irq_invalid @ 6
1319 .word __irq_invalid @ 7
1320 .word __irq_invalid @ 8
1321 .word __irq_invalid @ 9
1322 .word __irq_invalid @ a
1323 .word __irq_invalid @ b
1324 .word __irq_invalid @ c
1325 .word __irq_invalid @ d
1326 .word __irq_invalid @ e
1327 .word __irq_invalid @ f
1332 * Data abort dispatcher - dispatches it to the correct handler for the processor mode
1333 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
1336 @ save mode specific registers
1344 @ now branch to the relevant MODE handling routine
1347 bic r13, r13, #MODE_MASK
1348 orr r13, r13, #MODE_SVC
1349 msr spsr, r13 @ switch to SVC_32 mode
1352 ldr lr, [pc, lr, lsl #2]
1353 movs pc, lr @ Changes mode and branches
1355 .LCtab_dabt: .word __dabt_usr @ 0 (USR_26 / USR_32)
1356 .word __dabt_invalid @ 1 (FIQ_26 / FIQ_32)
1357 .word __dabt_invalid @ 2 (IRQ_26 / IRQ_32)
1358 .word __dabt_svc @ 3 (SVC_26 / SVC_32)
1359 .word __dabt_invalid @ 4
1360 .word __dabt_invalid @ 5
1361 .word __dabt_invalid @ 6
1362 .word __dabt_invalid @ 7
1363 .word __dabt_invalid @ 8
1364 .word __dabt_invalid @ 9
1365 .word __dabt_invalid @ a
1366 .word __dabt_invalid @ b
1367 .word __dabt_invalid @ c
1368 .word __dabt_invalid @ d
1369 .word __dabt_invalid @ e
1370 .word __dabt_invalid @ f
1375 * Prefetch abort dispatcher - dispatches it to the correct handler for the processor mode
1376 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
1380 @ save mode specific registers
1384 str lr, [r13] @ save lr_ABT
1386 str lr, [r13, #4] @ save spsr_ABT
1388 @ now branch to the relevant MODE handling routine
1391 bic r13, r13, #MODE_MASK
1392 orr r13, r13, #MODE_SVC
1393 msr spsr, r13 @ switch to SVC_32 mode
1396 ldr lr, [pc, lr, lsl #2]
1399 .LCtab_pabt: .word __pabt_usr @ 0 (USR_26 / USR_32)
1400 .word __pabt_invalid @ 1 (FIQ_26 / FIQ_32)
1401 .word __pabt_invalid @ 2 (IRQ_26 / IRQ_32)
1402 .word __pabt_svc @ 3 (SVC_26 / SVC_32)
1403 .word __pabt_invalid @ 4
1404 .word __pabt_invalid @ 5
1405 .word __pabt_invalid @ 6
1406 .word __pabt_invalid @ 7
1407 .word __pabt_invalid @ 8
1408 .word __pabt_invalid @ 9
1409 .word __pabt_invalid @ a
1410 .word __pabt_invalid @ b
1411 .word __pabt_invalid @ c
1412 .word __pabt_invalid @ d
1413 .word __pabt_invalid @ e
1414 .word __pabt_invalid @ f
1419 * Undef instr entry dispatcher - dispatches it to the correct handler for the processor mode
1420 * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
1424 @ save mode specific registers
1427 str lr, [r13] @ save lr_UND
1429 str lr, [r13, #4] @ save spsr_UND
1431 @ now branch to the relevant MODE handling routine
1434 bic r13, r13, #MODE_MASK
1435 orr r13, r13, #MODE_SVC
1436 msr spsr, r13 @ switch to SVC_32 mode
1439 ldr lr, [pc, lr, lsl #2]
1440 movs pc, lr @ Changes mode and branches
1442 .LCtab_und: .word __und_usr @ 0 (USR_26 / USR_32)
1443 .word __und_invalid @ 1 (FIQ_26 / FIQ_32)
1444 .word __und_invalid @ 2 (IRQ_26 / IRQ_32)
1445 .word __und_svc @ 3 (SVC_26 / SVC_32)
1446 .word __und_invalid @ 4
1447 .word __und_invalid @ 5
1448 .word __und_invalid @ 6
1449 .word __und_invalid @ 7
1450 .word __und_invalid @ 8
1451 .word __und_invalid @ 9
1452 .word __und_invalid @ a
1453 .word __und_invalid @ b
1454 .word __und_invalid @ c
1455 .word __und_invalid @ d
1456 .word __und_invalid @ e
1457 .word __und_invalid @ f
@ FIQ vector: FIQ is simply disabled (no FIQ support in this path); the
@ comment explains why handling FIQ here is impractical.
1461 /*=============================================================================
1463 *-----------------------------------------------------------------------------
1464 * Enter in FIQ mode, spsr = ANY CPSR, lr = ANY PC
1465 * MUST PRESERVE SVC SPSR, but need to switch to SVC mode to show our msg.
1466 * Basically to switch modes, we *HAVE* to clobber one register... brain
1467 * damage alert! I don't think that we can execute any code in here in any
1468 * other mode than FIQ... Ok you can switch to another mode, but you can't
1469 * get out of that mode without clobbering one register.
1471 vector_FIQ: disable_fiq
1474 /*=============================================================================
1475 * Address exception handler
1476 *-----------------------------------------------------------------------------
1477 * These aren't too critical.
1478 * (they're not supposed to happen, and won't happen in 32-bit data mode).
@ Literal words grouped together so the vector page stays I/D-cache
@ friendly; .LCvswi is the SWI vector target, .LCs* mirror the __temp_*
@ scratch areas.
1485 * We group all the following data together to optimise
1486 * for CPUs with separate I & D caches.
1490 .LCvswi: .word vector_swi
1492 .LCsirq: .word __temp_irq
1493 .LCsund: .word __temp_und
1494 .LCsabt: .word __temp_abt
@ The hardware vector page: eight entries starting at .LCvectors.  The
@ stubs are copied to .LCvectors + 0x200, so each vector is a short
@ branch into the relocated stub area rather than an ldr pc.
1498 .equ __real_stubs_start, .LCvectors + 0x200
1500 .LCvectors: swi SYS_ERROR0
1501 b __real_stubs_start + (vector_undefinstr - __stubs_start)
1502 ldr pc, __real_stubs_start + (.LCvswi - __stubs_start)
1503 b __real_stubs_start + (vector_prefetch - __stubs_start)
1504 b __real_stubs_start + (vector_data - __stubs_start)
1505 b __real_stubs_start + (vector_addrexcptn - __stubs_start)
1506 b __real_stubs_start + (vector_IRQ - __stubs_start)
1507 b __real_stubs_start + (vector_FIQ - __stubs_start)
@ Init-time copy routine: write the 8 vector words to the vector page,
@ then copy the stub code to offset 0x200.
@ NOTE(review): the loop body of the stub copy is missing in this extract.
1510 stmfd sp!, {r4 - r6, lr}
1512 adr r1, .LCvectors @ set up the vectors
1513 ldmia r1, {r1, r2, r3, r4, r5, r6, ip, lr}
1514 stmia r0, {r1, r2, r3, r4, r5, r6, ip, lr}
1517 adr r0, __stubs_start @ copy stubs to 0x200
1523 LOADREGS(fd, sp!, {r4 - r6, pc})
@ Per-mode scratch storage used by the vector stubs: each exception mode
@ saves its banked lr and spsr here before switching to SVC mode.  Layout
@ (irq, und, abt; lr then spsr) is relied upon by the stubs -- keep order.
1528 * Do not reorder these, and do not insert extra data between...
1531 __temp_irq: .word 0 @ saved lr_irq
1532 .word 0 @ saved spsr_irq
1534 __temp_und: .word 0 @ Saved lr_und
1535 .word 0 @ Saved spsr_und
1537 __temp_abt: .word 0 @ Saved lr_abt
1538 .word 0 @ Saved spsr_abt
1542 .globl cr_no_alignment