2 * linux/arch/arm/kernel/entry-armv.S
4 * Copyright (C) 1996,1997,1998 Russell King.
5 * ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk)
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
11 * Low-level vector interface routines
13 * Note: there is a StrongARM bug in the STMIA rn, {regs}^ instruction that causes
14 * it to save wrong values... Be aware!
16 #include <linux/config.h>
17 #include <linux/init.h>
19 #include <asm/thread_info.h>
21 #include <asm/ptrace.h>
23 #include "entry-header.S"
@ ---------------------------------------------------------------------------
@ Acorn IOC/IOMD interrupt decode (disable_fiq fragment + get_irqnr_and_base).
@ NOTE(review): partial view -- the enclosing #if, the tst/beq tests between
@ the ldreq loads, and the .endm terminators are not visible in this chunk
@ (original line numbering has gaps).  Do not assemble as-is.
@ ---------------------------------------------------------------------------
26 /* IOC / IOMD based hardware */
27 #include <asm/hardware/iomd.h>
@ Split IOC_BASE into two 8-bit-rotatable immediates for mov/orr below.
29 .equ ioc_base_high, IOC_BASE & 0xff000000
30 .equ ioc_base_low, IOC_BASE & 0x00ff0000
@ disable_fiq body: build IOC base in r12 and write the FIQ mask register.
32 mov r12, #ioc_base_high
34 orr r12, r12, #ioc_base_low
36 strb r12, [r12, #0x38] @ Disable FIQ register
@ get_irqnr_and_base: read the IOMD request registers highest-priority
@ first; each miss (EQ) advances \base by 256 to the next priority table.
39 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
40 mov r4, #ioc_base_high @ point at IOC
42 orr r4, r4, #ioc_base_low
44 ldrb \irqstat, [r4, #IOMD_IRQREQB] @ get high priority first
45 ldr \base, =irq_prio_h
48 ldreqb \irqstat, [r4, #IOMD_DMAREQ] @ get dma
49 addeq \base, \base, #256 @ irq_prio_h table size
53 ldreqb \irqstat, [r4, #IOMD_IRQREQA] @ get low priority
54 addeq \base, \base, #256 @ irq_prio_d table size
57 ldreqb \irqstat, [r4, #IOMD_IRQREQC]
58 addeq \base, \base, #256 @ irq_prio_l table size
62 ldreqb \irqstat, [r4, #IOMD_IRQREQD]
63 addeq \base, \base, #256 @ irq_prio_lc table size
@ Final lookup: index the selected 256-byte table by the raw status byte.
66 2406: ldrneb \irqnr, [\base, \irqstat] @ get IRQ number
70 * Interrupt table (incorporates priority). Please note that we
71 * rely on the order of these tables (see above code).
@ irq_prio_h: 256-byte table mapping an IOMD_IRQREQB status byte to the
@ IRQ number of its highest-priority set bit (indexed by \irqstat above).
74 irq_prio_h: .byte 0, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
75 .byte 12, 8, 9, 8,10,10,10,10,11,11,11,11,10,10,10,10
76 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
77 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
78 .byte 14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
79 .byte 14,14,14,14,10,10,10,10,11,11,11,11,10,10,10,10
80 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
81 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
82 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
83 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
84 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
85 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
86 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
87 .byte 15,15,15,15,10,10,10,10,11,11,11,11,10,10,10,10
88 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
89 .byte 13,13,13,13,10,10,10,10,11,11,11,11,10,10,10,10
@ irq_prio_d: same scheme for the IOMD_DMAREQ status byte (DMA IRQs 16..23).
@ Must immediately follow irq_prio_h: the decode macro reaches it by
@ adding 256 to the table base.
91 irq_prio_d: .byte 0,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
92 .byte 20,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
93 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
94 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
95 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
96 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
97 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
98 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
99 .byte 23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
100 .byte 23,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
101 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
102 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
103 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
104 .byte 22,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
105 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
106 .byte 21,16,17,16,18,16,17,16,19,16,17,16,18,16,17,16
@ irq_prio_l: priority decode for the IOMD_IRQREQA (low-priority) status
@ byte, yielding IRQ numbers 0..7.  Ordering relative to the tables above
@ is load-bearing (base + 2*256 reaches this table).
108 irq_prio_l: .byte 0, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
109 .byte 4, 0, 1, 0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3
110 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
111 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
112 .byte 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
113 .byte 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3, 3
114 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
115 .byte 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5
116 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
117 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
118 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
119 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
120 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
121 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
122 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
123 .byte 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
@ irq_prio_lc: priority decode for the IOMD_IRQREQC status byte
@ (IRQ numbers 24..31); reached at table base + 3*256.
125 irq_prio_lc: .byte 24,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
126 .byte 28,24,25,24,26,26,26,26,27,27,27,27,27,27,27,27
127 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
128 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
129 .byte 30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
130 .byte 30,30,30,30,30,30,30,30,27,27,27,27,27,27,27,27
131 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
132 .byte 29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
133 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
134 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
135 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
136 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
137 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
138 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
139 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
140 .byte 31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31
@ irq_prio_ld: priority decode for the IOMD_IRQREQD status byte
@ (IRQ numbers 40..47).  Same structure as irq_prio_lc, offset by 16.
143 irq_prio_ld: .byte 40,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
144 .byte 44,40,41,40,42,42,42,42,43,43,43,43,43,43,43,43
145 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
146 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
147 .byte 46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
148 .byte 46,46,46,46,46,46,46,46,43,43,43,43,43,43,43,43
149 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
150 .byte 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45
151 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
152 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
153 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
154 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
155 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
156 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
157 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
158 .byte 47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
@ EBSA110: find the lowest pending IRQ by shifting the status byte right.
@ NOTE(review): fragmentary -- the tst instructions that set EQ before each
@ addeq/moveq pair, and the .endm lines, fall in the numbering gaps.
162 #elif defined(CONFIG_ARCH_EBSA110)
164 #define IRQ_STAT 0xff000000 /* read */
169 .macro get_irqnr_and_base, irqnr, stat, base, tmp
171 ldrb \stat, [\base] @ get interrupts
@ Binary search over the 8 status bits: halve the remaining window each step.
174 addeq \irqnr, \irqnr, #4
175 moveq \stat, \stat, lsr #4
177 addeq \irqnr, \irqnr, #2
178 moveq \stat, \stat, lsr #2
180 addeq \irqnr, \irqnr, #1
181 moveq \stat, \stat, lsr #1
182 tst \stat, #1 @ bit 0 should be set
@ No priority table needed on this machine: empty macro.
185 .macro irq_prio_table
@ Shark: poll the two cascaded 8259-style PICs via the poll command (0x0C).
@ Bit 7 of the poll response indicates a pending interrupt; low 3 bits are
@ the IRQ number (slave PIC IRQs are offset by 8).
@ NOTE(review): the branch glue between master and slave polls is missing
@ from this chunk (line-number gaps).
188 #elif defined(CONFIG_ARCH_SHARK)
193 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
197 strb \irqstat, [r4, #0x20] @outb(0x0C, 0x20) /* Poll command */
198 ldrb \irqnr, [r4, #0x20] @irq = inb(0x20) & 7
199 and \irqstat, \irqnr, #0x80
202 and \irqnr, \irqnr, #7
205 43: mov \irqstat, #0x0C
206 strb \irqstat, [r4, #0xa0] @outb(0x0C, 0xA0) /* Poll command */
207 ldrb \irqnr, [r4, #0xa0] @irq = (inb(0xA0) & 7) + 8
208 and \irqstat, \irqnr, #0x80
211 and \irqnr, \irqnr, #7
212 add \irqnr, \irqnr, #8
216 .macro irq_prio_table
@ Footbridge (DC21285): read the masked-interrupt register at CSR+0x180 and
@ dispatch with a fixed-priority tst/movne ladder.  NOTE(review): partial
@ view -- the bne 1001f exits after each test are not visible here, so the
@ apparent "later test wins" reading is an artifact of the missing lines.
219 #elif defined(CONFIG_FOOTBRIDGE)
220 #include <asm/hardware/dec21285.h>
@ Split ARMCSR_BASE into mov/orr-encodable halves.
225 .equ dc21285_high, ARMCSR_BASE & 0xff000000
226 .equ dc21285_low, ARMCSR_BASE & 0x00ffffff
228 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
229 mov r4, #dc21285_high
231 orr r4, r4, #dc21285_low
233 ldr \irqstat, [r4, #0x180] @ get interrupts
@ SDRAM parity is checked first (highest priority).
235 mov \irqnr, #IRQ_SDRAMPARITY
236 tst \irqstat, #IRQ_MASK_SDRAMPARITY
239 tst \irqstat, #IRQ_MASK_UART_RX
240 movne \irqnr, #IRQ_CONRX
243 tst \irqstat, #IRQ_MASK_DMA1
244 movne \irqnr, #IRQ_DMA1
247 tst \irqstat, #IRQ_MASK_DMA2
248 movne \irqnr, #IRQ_DMA2
251 tst \irqstat, #IRQ_MASK_IN0
252 movne \irqnr, #IRQ_IN0
255 tst \irqstat, #IRQ_MASK_IN1
256 movne \irqnr, #IRQ_IN1
259 tst \irqstat, #IRQ_MASK_IN2
260 movne \irqnr, #IRQ_IN2
263 tst \irqstat, #IRQ_MASK_IN3
264 movne \irqnr, #IRQ_IN3
267 tst \irqstat, #IRQ_MASK_PCI
268 movne \irqnr, #IRQ_PCI
271 tst \irqstat, #IRQ_MASK_DOORBELLHOST
272 movne \irqnr, #IRQ_DOORBELLHOST
275 tst \irqstat, #IRQ_MASK_I2OINPOST
276 movne \irqnr, #IRQ_I2OINPOST
279 tst \irqstat, #IRQ_MASK_TIMER1
280 movne \irqnr, #IRQ_TIMER1
283 tst \irqstat, #IRQ_MASK_TIMER2
284 movne \irqnr, #IRQ_TIMER2
287 tst \irqstat, #IRQ_MASK_TIMER3
288 movne \irqnr, #IRQ_TIMER3
291 tst \irqstat, #IRQ_MASK_UART_TX
292 movne \irqnr, #IRQ_CONTX
295 tst \irqstat, #IRQ_MASK_PCI_ABORT
296 movne \irqnr, #IRQ_PCI_ABORT
299 tst \irqstat, #IRQ_MASK_PCI_SERR
300 movne \irqnr, #IRQ_PCI_SERR
303 tst \irqstat, #IRQ_MASK_DISCARD_TIMER
304 movne \irqnr, #IRQ_DISCARD_TIMER
307 tst \irqstat, #IRQ_MASK_PCI_DPERR
308 movne \irqnr, #IRQ_PCI_DPERR
311 tst \irqstat, #IRQ_MASK_PCI_PERR
312 movne \irqnr, #IRQ_PCI_PERR
316 .macro irq_prio_table
@ NexusPCI: hardware has no mask register, so pending bits are ANDed with a
@ software mask (soft_irq_mask) before the lowest set bit is scanned.
@ NOTE(review): loop branch back to 1001 and .endm are outside this view.
319 #elif defined(CONFIG_ARCH_NEXUSPCI)
324 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
325 ldr \irqstat, =INTCONT_BASE
326 ldr \base, =soft_irq_mask
327 ldr \irqstat, [\irqstat] @ get interrupts
330 and \irqstat, \irqstat, \base @ mask out disabled ones
331 1001: tst \irqstat, #1
332 addeq \irqnr, \irqnr, #1
333 moveq \irqstat, \irqstat, lsr #1
339 .macro irq_prio_table
@ TBOX: same software-masked lowest-set-bit scan as NexusPCI, with the
@ status register at the fixed virtual address 0xffff7000.
347 #elif defined(CONFIG_ARCH_TBOX)
352 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
353 ldr \irqstat, =0xffff7000
354 ldr \irqstat, [\irqstat] @ get interrupts
355 ldr \base, =soft_irq_mask
358 and \irqstat, \irqstat, \base @ mask out disabled ones
359 1001: tst \irqstat, #1
360 addeq \irqnr, \irqnr, #1
361 moveq \irqstat, \irqstat, lsr #1
367 .macro irq_prio_table
@ SA1100: AND pending (ICIP) with mask (ICMR), then locate the lowest set
@ bit -- first byte-at-a-time, then by 4/2/1-bit steps.  NOTE(review): the
@ initial tst before the first byte skip and the final .endm are missing
@ from this chunk.
375 #elif defined(CONFIG_ARCH_SA1100)
380 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
381 mov r4, #0xfa000000 @ ICIP = 0xfa050000
382 add r4, r4, #0x00050000
383 ldr \irqstat, [r4] @ get irqs
384 ldr \irqnr, [r4, #4] @ ICMR = 0xfa050004
385 ands \irqstat, \irqstat, \irqnr
@ Skip whole zero bytes first (8 bits at a time)...
389 moveq \irqstat, \irqstat, lsr #8
390 addeq \irqnr, \irqnr, #8
391 tsteq \irqstat, #0xff
392 moveq \irqstat, \irqstat, lsr #8
393 addeq \irqnr, \irqnr, #8
394 tsteq \irqstat, #0xff
395 moveq \irqstat, \irqstat, lsr #8
396 addeq \irqnr, \irqnr, #8
@ ...then narrow down within the remaining byte.
398 moveq \irqstat, \irqstat, lsr #4
399 addeq \irqnr, \irqnr, #4
401 moveq \irqstat, \irqstat, lsr #2
402 addeq \irqnr, \irqnr, #2
404 addeqs \irqnr, \irqnr, #1 @ "s": sets flags for caller's pending test
408 .macro irq_prio_table
@ L7200: read the IRQ status register at irq_base_addr+0x1000 and scan for
@ the lowest set bit.  NOTE(review): loop close and .endm not in view.
411 #elif defined(CONFIG_ARCH_L7200)
412 #include <asm/hardware.h>
414 .equ irq_base_addr, IO_BASE_2
419 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
420 mov \irqstat, #irq_base_addr @ Virt addr IRQ regs
421 add \irqstat, \irqstat, #0x00001000 @ Status reg
422 ldr \irqstat, [\irqstat, #0] @ get interrupts
424 1001: tst \irqstat, #1
425 addeq \irqnr, \irqnr, #1
426 moveq \irqstat, \irqstat, lsr #1
432 .macro irq_prio_table
@ Integrator: check the PIC first; if it has nothing pending (EQ), fall
@ back to the CIC in the header.  Then scan for the lowest set bit, a
@ nibble at a time.  NOTE(review): loop branches between 1001/1002/1003
@ are in the numbering gaps.
435 #elif defined(CONFIG_ARCH_INTEGRATOR)
440 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
441 /* FIXME: should not be using soo many LDRs here */
442 ldr \base, =IO_ADDRESS(INTEGRATOR_IC_BASE)
443 mov \irqnr, #IRQ_PIC_START
444 ldr \irqstat, [\base, #IRQ_STATUS] @ get masked status
445 ldr \base, =IO_ADDRESS(INTEGRATOR_HDR_BASE)
447 ldreq \irqstat, [\base, #(INTEGRATOR_HDR_IC_OFFSET+IRQ_STATUS)]
448 moveq \irqnr, #IRQ_CIC_START
@ Coarse scan: skip four bits at a time while the low nibble is clear.
450 1001: tst \irqstat, #15
452 add \irqnr, \irqnr, #4
453 movs \irqstat, \irqstat, lsr #4
@ Fine scan: single-bit steps within the non-zero nibble.
455 1002: tst \irqstat, #1
457 add \irqnr, \irqnr, #1
458 movs \irqstat, \irqstat, lsr #1
460 1003: /* EQ will be set if no irqs pending */
463 .macro irq_prio_table
@ Versatile/PB: read the VIC masked status and scan for the lowest set bit
@ (nibble-coarse then bit-fine), same shape as the Integrator code above.
@ The commented-out clz variant would find the *highest* bit in one insn.
466 #elif defined(CONFIG_ARCH_VERSATILE_PB)
471 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
472 ldr \base, =IO_ADDRESS(VERSATILE_VIC_BASE)
473 ldr \irqstat, [\base, #VIC_IRQ_STATUS] @ get masked status
478 1001: tst \irqstat, #15
480 add \irqnr, \irqnr, #4
481 movs \irqstat, \irqstat, lsr #4
483 1002: tst \irqstat, #1
485 add \irqnr, \irqnr, #1
486 movs \irqstat, \irqstat, lsr #1
488 1003: /* EQ will be set if no irqs pending */
490 @ clz \irqnr, \irqstat
491 @1003: /* EQ will be set if we reach MAXIRQNUM */
494 .macro irq_prio_table
@ CLPS711X: two status/mask register pairs (INTSR1/INTMR1, INTSR2/INTMR2).
@ The lsl/lsr #16 pair truncates the mask to its low 16 valid bits before
@ ANDing.  The compile-time #if guards the assumption that the same offset
@ (INTSR2-INTSR1) steps from SR1 to SR2 as from MR1 to MR2.
@ NOTE(review): branches between the bank-1 check and bank-2 fallback are
@ not visible in this chunk.
497 #elif defined(CONFIG_ARCH_CLPS711X)
499 #include <asm/hardware/clps7111.h>
504 #if (INTSR2 - INTSR1) != (INTMR2 - INTMR1)
505 #error INTSR stride != INTMR stride
508 .macro get_irqnr_and_base, irqnr, stat, base, mask
509 mov \base, #CLPS7111_BASE
510 ldr \stat, [\base, #INTSR1]
511 ldr \mask, [\base, #INTMR1]
513 mov \mask, \mask, lsl #16
514 and \stat, \stat, \mask, lsr #16
515 movs \stat, \stat, lsr #4 @ bank 1: low 4 bits handled elsewhere
@ Bank 2: step the base by the common stride and repeat the masked load.
518 add \base, \base, #INTSR2 - INTSR1
519 ldr \stat, [\base, #INTSR1]
520 ldr \mask, [\base, #INTMR1]
522 mov \mask, \mask, lsl #16
523 and \stat, \stat, \mask, lsr #16
@ Lowest-set-bit scan: bytes, then 4/2/1-bit steps.
525 1001: tst \stat, #255
526 addeq \irqnr, \irqnr, #8
527 moveq \stat, \stat, lsr #8
529 addeq \irqnr, \irqnr, #4
530 moveq \stat, \stat, lsr #4
532 addeq \irqnr, \irqnr, #2
533 moveq \stat, \stat, lsr #2
535 addeq \irqnr, \irqnr, #1
536 moveq \stat, \stat, lsr #1
537 tst \stat, #1 @ bit 0 should be set
540 .macro irq_prio_table
@ Camelot (Excalibur): the interrupt controller supplies the IRQ ID
@ directly; the hardware value is 1-based, so subtract 1 when non-zero.
543 #elif defined (CONFIG_ARCH_CAMELOT)
544 #include <asm/arch/platform.h>
545 #undef IRQ_MODE /* same name defined in asm/proc/ptrace.h */
546 #include <asm/arch/int_ctrl00.h>
551 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
553 ldr \irqstat, =INT_ID(IO_ADDRESS(EXC_INT_CTRL00_BASE))
554 ldr \irqnr,[\irqstat]
556 subne \irqnr,\irqnr,#1 @ convert 1-based hw ID to 0-based IRQ number
561 .macro irq_prio_table
@ IOP310/ADIFCC (XScale 80200): interrupt source and control live in
@ coprocessor 13 registers.  Each source bit is only acted on if its
@ corresponding enable bit in INTCTL is also set (tst + tstne pattern).
@ NOTE(review): the branch-out lines after each movne are not in view.
564 #elif defined(CONFIG_ARCH_IOP310) || defined(CONFIG_ARCH_ADIFCC)
569 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
570 mrc p13, 0, \irqstat, c4, c0, 0 @ get INTSRC
571 mrc p13, 0, \base, c0, c0, 0 @ get INTCTL
573 tst \irqstat, #(1<<29) @ if INTSRC_BI
574 tstne \base, #(1<<3) @ and INTCTL_BM
575 movne \irqnr, #IRQ_XS80200_BCU
578 tst \irqstat, #(1<<28) @ if INTSRC_PI
579 tstne \base, #(1<<2) @ and INTCTL_PM
580 movne \irqnr, #IRQ_XS80200_PMU
583 tst \irqstat, #(1<<31) @ if INTSRC_FI
584 tstne \base, #(1<<0) @ and INTCTL_FM
585 movne \irqnr, #IRQ_XS80200_EXTFIQ
588 tst \irqstat, #(1<<30) @ if INTSRC_II
589 tstne \base, #(1<<1) @ and INTCTL_IM
590 movne \irqnr, #IRQ_XS80200_EXTIRQ
595 .macro irq_prio_table
@ IOP321: IINTSRC is read from coprocessor 6; the bit position is turned
@ into an IRQ number relative to IRQ_IOP321_DMA0_EOT.  NOTE(review): the
@ clz/base setup between the mrc and the subs is missing from this chunk.
598 #elif defined(CONFIG_ARCH_IOP321)
603 * Note: only deal with normal interrupts, not FIQ
605 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
607 mrc p6, 0, \irqstat, c8, c0, 0 @ Read IINTSRC
612 subs \irqnr,\base,\irqnr
613 add \irqnr,\irqnr,#IRQ_IOP321_DMA0_EOT
617 .macro irq_prio_table
@ PXA: two access paths to ICIP/ICMR -- via cp6 (mrc) or memory-mapped
@ registers; this view shows both (the #ifdef selecting one is in a gap).
@ rsb+and isolates the lowest set bit (x & -x); the final rsb converts the
@ bit position into the IRQ number space above PXA_IRQ_SKIP.
@ NOTE(review): the clz between the bit-isolate and the rsb is not visible.
620 #elif defined(CONFIG_ARCH_PXA)
625 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
627 mrc p6, 0, \irqstat, c0, c0, 0 @ ICIP
628 mrc p6, 0, \irqnr, c1, c0, 0 @ ICMR
630 mov \base, #io_p2v(0x40000000) @ IIR Ctl = 0x40d00000
631 add \base, \base, #0x00d00000
632 ldr \irqstat, [\base, #0] @ ICIP
633 ldr \irqnr, [\base, #4] @ ICMR
635 ands \irqnr, \irqstat, \irqnr
637 rsb \irqstat, \irqnr, #0
638 and \irqstat, \irqstat, \irqnr @ isolate lowest set bit
640 rsb \irqnr, \irqnr, #(31 - PXA_IRQ_SKIP)
644 .macro irq_prio_table
@ IXP4XX: read ICIP from the memory-mapped controller; both a subs-based
@ conversion and a shift-scan loop appear here (the selection logic between
@ them sits in the missing lines).
647 #elif defined (CONFIG_ARCH_IXP4XX)
652 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
653 ldr \irqstat, =(IXP4XX_INTC_BASE_VIRT+IXP4XX_ICIP_OFFSET)
654 ldr \irqstat, [\irqstat] @ get interrupts
659 subs \irqnr, \base, \irqnr
662 1001: tst \irqstat, #1
663 addeq \irqnr, \irqnr, #1
664 moveq \irqstat, \irqstat, lsr #1
672 .macro irq_prio_table
@ OMAP: check IH1 (level-1 handler); if the decoded source is the cascade
@ interrupt (INT_IH2_IRQ), re-read from IH2 and offset the result by 32.
@ The ITR & ~MIR computation yields the unmasked pending set.
675 #elif defined(CONFIG_ARCH_OMAP)
680 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
681 ldr \base, =IO_ADDRESS(OMAP_IH1_BASE)
682 ldr \irqnr, [\base, #IRQ_ITR]
683 ldr \tmp, [\base, #IRQ_MIR]
684 mov \irqstat, #0xffffffff
685 bic \tmp, \irqstat, \tmp @ tmp = ~MIR
689 ldr \irqnr, [\base, #IRQ_SIR_FIQ]
691 ldreq \irqnr, [\base, #IRQ_SIR_IRQ]
692 cmpeq \irqnr, #INT_IH2_IRQ
693 ldreq \base, =IO_ADDRESS(OMAP_IH2_BASE)
694 ldreq \irqnr, [\base, #IRQ_SIR_IRQ]
695 addeqs \irqnr, \irqnr, #32 @ IH2 IRQs live above the 32 IH1 lines
699 .macro irq_prio_table
@ S3C2410: the controller reports an offset directly (INTOFFSET, +0x14),
@ which is cross-checked against INTPND; mismatches are logged and the
@ number is recomputed by scanning INTPND by hand.  External (EINT) and
@ LCD interrupts are decoded from their own sub-registers.
@ NOTE(review): heavily fragmentary -- many branches, labels and the printk
@ call in the debug path fall in the numbering gaps; read with the original
@ file at hand.
702 #elif defined(CONFIG_ARCH_S3C2410)
703 /* S3C2410X IRQ Handler, <ben@simtec.co.uk> */
705 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
708 mov \tmp, #S3C2410_VA_IRQ
709 ldr \irqnr, [ \tmp, #0x14 ] @ get irq no
712 beq 1002f @ external irq reg
714 beq 1003f @ lcd controller
716 @ debug check to see if interrupt reported is the same
@ Sanity check: shift INTPND down by the reported offset; the low bit
@ should be set if hardware and INTOFFSET agree.
721 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
722 mov \irqstat, \irqstat, lsr \irqnr
@ Debug/error path: capture INTOFFSET/INTPND/SRCPND for the log message.
727 stmfd r13!, { r0 - r4 , r14 }
728 ldr r1, [ \tmp, #0x14 ] @ intoffset
729 ldr r2, [ \tmp, #0x10 ] @ INTPND
730 ldr r3, [ \tmp, #0x00 ] @ SRCPND
736 .ascii "<7>irq: err - bad offset %d, intpnd=%08x, srcpnd=%08x\n"
741 mov \tmp, #S3C2410_VA_IRQ
742 ldmfd r13!, { r0 - r4 , r14 }
744 @ try working out interript number for ourselves
@ Fallback: scan INTPND bit-by-bit to recover the IRQ number.
746 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
748 movs \irqstat, \irqstat, lsr#1
749 bcs 30000b @ try and re-start the proccess
750 add \irqnr, \irqnr, #1
754 @ found no interrupt, set Z flag and leave
760 @ we base the s3c2410x interrupts at 16 and above to allow
761 @ isa peripherals to have their standard interrupts, also
762 @ ensure that Z flag is un-set on exit
764 @ note, we cannot be sure if we get IRQ_EINT0 (0) that
765 @ there is simply no interrupt pending, so in all other
766 @ cases we jump to say we have found something, otherwise
767 @ we check to see if the interrupt really is assrted
768 adds \irqnr, \irqnr, #IRQ_EINT0
769 teq \irqnr, #IRQ_EINT0
771 ldr \irqstat, [ \tmp, #0x10 ] @ INTPND
776 @ we get here from no main or external interrupts pending
@ External-interrupt decode: EXTINTPEND masked by EXTINTMASK, scanned
@ bit-by-bit from IRQ_EINT4 up to IRQ_EINT23.
778 add \tmp, \tmp, #S3C2410_VA_GPIO - S3C2410_VA_IRQ
779 ldr \irqstat, [ \tmp, # 0xa8 ] @ EXTINTPEND
780 ldr \irqnr, [ \tmp, # 0xa4 ] @ EXTINTMASK
782 bic \irqstat, \irqstat, \irqnr @ clear masked irqs
784 mov \irqnr, #IRQ_EINT4 @ start extint nos
785 mov \irqstat, \irqstat, lsr#4 @ ignore bottom 4 bits
787 movs \irqstat, \irqstat, lsr#1
789 add \irqnr, \irqnr, #1
790 cmp \irqnr, #IRQ_EINT23
793 @ found no interrupt, set Z flag and leave
798 @ lcd interrupt has been asserted...
@ LCD decode: distinguish frame vs FIFO interrupts from the LCD pending reg.
799 add \tmp, \tmp, #S3C2410_VA_LCD - S3C2410_VA_IRQ
800 ldr \irqstat, [ \tmp, # 0x54 ] @ lcd int pending
803 movne \irqnr, #IRQ_LCD_FRAME
805 movne \irqnr, #IRQ_LCD_FIFO
807 @ fall through to exit with flags updated
809 1004: @ ensure Z flag clear in case our MOVS shifted out the last bit
816 /* currently don't need an disable_fiq macro */
821 /* we don't have an irq priority table */
822 .macro irq_prio_table
@ LH7A400: shift the PIC INTSR into carry one bit at a time; carry set
@ means the current bit is the pending IRQ.  The build-time #error keeps
@ the mutually-exclusive LH7A404 config from being enabled alongside.
825 #elif defined(CONFIG_ARCH_LH7A400)
827 # if defined (CONFIG_ARCH_LH7A404)
828 # error "LH7A400 and LH7A404 are mutually exclusive"
833 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
835 mov \base, #io_p2v(0x80000000) @ APB registers
836 ldr \irqstat, [\base, #0x500] @ PIC INTSR
838 1001: movs \irqstat, \irqstat, lsr #1 @ Shift into carry
839 bcs 1008f @ Bit set; irq found
840 add \irqnr, \irqnr, #1
841 bne 1001b @ Until no bits
842 b 1009f @ Nothing? Hmm.
843 1008: movs \irqstat, #1 @ Force !Z
847 .macro irq_prio_table
@ LH7A404: two cascaded VICs.  Vectored interrupts are taken straight from
@ VECTADDR (label 1002); otherwise the raw IRQSTATUS of VIC1 (irq base 0)
@ or VIC2 (irq base 32) is bit-scanned as on the LH7A400.  Writing back to
@ VECTADDR at the end acknowledges/clears the vector read.
@ NOTE(review): the beq/bne branch lines between the tst tests are missing
@ from this chunk.
850 #elif defined(CONFIG_ARCH_LH7A404)
855 .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
856 mov \irqnr, #0 @ VIC1 irq base
857 mov \base, #io_p2v(0x80000000) @ APB registers
858 add \base, \base, #0x8000
859 ldr \tmp, [\base, #0x0030] @ VIC1_VECTADDR
860 tst \tmp, #VA_VECTORED @ Direct vectored
862 tst \tmp, #VA_VIC1DEFAULT @ Default vectored VIC1
863 ldrne \irqstat, [\base, #0] @ VIC1_IRQSTATUS
865 add \base, \base, #(0xa000 - 0x8000) @ switch to VIC2
866 ldr \tmp, [\base, #0x0030] @ VIC2_VECTADDR
867 tst \tmp, #VA_VECTORED @ Direct vectored
869 ldr \irqstat, [\base, #0] @ VIC2_IRQSTATUS
870 mov \irqnr, #32 @ VIC2 irq base
872 1001: movs \irqstat, \irqstat, lsr #1 @ Shift into carry
873 bcs 1008f @ Bit set; irq found
874 add \irqnr, \irqnr, #1
875 bne 1001b @ Until no bits
876 b 1009f @ Nothing? Hmm.
877 1002: and \irqnr, \tmp, #0x3f @ Mask for valid bits
878 1008: movs \irqstat, #1 @ Force !Z
879 str \tmp, [\base, #0x0030] @ Clear vector
883 .macro irq_prio_table
@ End of the per-machine #if chain: unknown machines fail the build.
887 #error Unknown architecture
891 * Invalid mode handlers
@ These catch exceptions taken from CPU modes we never expect (FIQ, or the
@ reserved mode slots in the dispatch tables below).  Each saves a full
@ pt_regs frame and records a BAD_* reason code.  NOTE(review): the
@ branches to the common bad-mode handler are not visible in this chunk.
893 __pabt_invalid: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
894 stmia sp, {r0 - lr} @ Save XXX r0 - lr
896 mov r1, #BAD_PREFETCH
899 __dabt_invalid: sub sp, sp, #S_FRAME_SIZE
900 stmia sp, {r0 - lr} @ Save SVC r0 - lr [lr *should* be intact]
905 __irq_invalid: sub sp, sp, #S_FRAME_SIZE @ Allocate space on stack for frame
906 stmfd sp, {r0 - lr} @ Save r0 - lr
911 __und_invalid: sub sp, sp, #S_FRAME_SIZE
914 mov r1, #BAD_UNDEFINSTR @ int reason
@ Copy the saved pc/cpsr/old_r0 triple from the per-mode temp area into
@ the frame on the stack.
917 ldmia r4, {r5 - r7} @ Get XXX pc, cpsr, old_r0
919 stmia r4, {r5 - r7} @ Save XXX pc, cpsr, old_r0
921 and r2, r6, #31 @ int mode
@ Data abort taken while in SVC mode.  Builds a pt_regs frame, re-enables
@ IRQs if they were enabled in the interrupted context, calls the
@ CPU-specific abort decoder via the `processor` vector, then restores all
@ registers including cpsr.  NOTE(review): the do_DataAbort call and parts
@ of the frame setup are in the missing lines.
928 __dabt_svc: sub sp, sp, #S_FRAME_SIZE
929 stmia sp, {r0 - r12} @ save r0 - r12
931 add r0, sp, #S_FRAME_SIZE @ r0 = interrupted SVC sp
932 ldmia r2, {r2 - r4} @ get pc, cpsr
935 stmia r5, {r0 - r4} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
936 mrs r9, cpsr @ Enable interrupts if they were
938 biceq r9, r9, #PSR_I_BIT @ previously
940 * This routine must not corrupt r9
@ Indirect call through the per-CPU `processor` function table.
943 ldr r4, .LCprocfns @ pass r2, r3 to
944 mov lr, pc @ processor code
945 ldr pc, [r4] @ call processor specific code
955 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ IRQ taken while in SVC mode.  Saves a frame, bumps the preempt count
@ (CONFIG_PREEMPT), loops on get_irqnr_and_base dispatching each pending
@ IRQ, then optionally preempts before restoring the frame.
@ NOTE(review): the asm_do_IRQ call, the loop branch, and some preempt
@ checks fall in the numbering gaps.
958 __irq_svc: sub sp, sp, #S_FRAME_SIZE
959 stmia sp, {r0 - r12} @ save r0 - r12
961 add r5, sp, #S_FRAME_SIZE @ r5 = interrupted SVC sp
965 stmia r4, {r5, r6, r7, r8, r9} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
966 #ifdef CONFIG_PREEMPT
968 ldr r9, [r8, #TI_PREEMPT] @ get preempt count
969 add r7, r9, #1 @ increment it
970 str r7, [r8, #TI_PREEMPT]
972 1: get_irqnr_and_base r0, r6, r5, lr
975 @ routine called with r0 = irq number, r1 = struct pt_regs *
979 #ifdef CONFIG_PREEMPT
980 ldr r0, [r8, #TI_FLAGS] @ get flags
981 tst r0, #_TIF_NEED_RESCHED
984 ldr r0, [r8, #TI_PREEMPT] @ read preempt value
986 str r9, [r8, #TI_PREEMPT] @ restore preempt count
@ Deliberate fault if the preempt count was corrupted.
987 strne r0, [r0, -r0] @ bug()
989 ldr r0, [sp, #S_PSR] @ irqs are already disabled
991 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ Kernel preemption from IRQ-in-SVC: only preempt when the saved preempt
@ count was zero and no interrupts/bottom halves are in progress; then set
@ PREEMPT_ACTIVE, enable IRQs, schedule, and re-check TI_FLAGS in a loop.
@ NOTE(review): the schedule call and return path are in the missing lines.
995 #ifdef CONFIG_PREEMPT
996 svc_preempt: teq r9, #0 @ was preempt count = 0
997 ldreq r6, .LCirq_stat
999 ldr r0, [r6, #4] @ local_irq_count
1000 ldr r1, [r6, #8] @ local_bh_count
1003 mov r7, #PREEMPT_ACTIVE
1004 str r7, [r8, #TI_PREEMPT] @ set PREEMPT_ACTIVE
1005 1: enable_irq r2 @ enable IRQs
1007 disable_irq r0 @ disable IRQs
1008 ldr r0, [r8, #TI_FLAGS] @ get new tasks TI_FLAGS
1009 tst r0, #_TIF_NEED_RESCHED
1010 beq preempt_return @ go again
@ Undefined instruction taken in SVC mode.  Saves a frame, fetches the
@ faulting instruction (lr-4), and hands it to call_fpe; if no coprocessor
@ claims it, do_undefinstr is reached via the lr return path.
@ NOTE(review): the do_undefinstr call sits in the missing lines.
1015 __und_svc: sub sp, sp, #S_FRAME_SIZE
1016 stmia sp, {r0 - r12} @ save r0 - r12
1020 add r3, sp, #S_FRAME_SIZE @ r3 = interrupted SVC sp
1022 stmia r2, {r3 - r7} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
1024 ldr r0, [r5, #-4] @ r0 = instruction
1025 adrsvc al, r9, 1f @ r9 = normal FP return
1026 bl call_fpe @ lr = undefined instr return
1028 mov r0, sp @ struct pt_regs *regs
1032 ldr lr, [sp, #S_PSR] @ Get SVC cpsr
1034 ldmia sp, {r0 - pc}^ @ Restore SVC registers
@ Prefetch abort taken in SVC mode.  Same frame setup and conditional IRQ
@ re-enable as __dabt_svc, then do_PrefetchAbort with the faulting pc.
1037 __pabt_svc: sub sp, sp, #S_FRAME_SIZE
1038 stmia sp, {r0 - r12} @ save r0 - r12
1040 add r0, sp, #S_FRAME_SIZE @ r0 = interrupted SVC sp
1041 ldmia r2, {r2 - r4} @ get pc, cpsr
1044 stmia r5, {r0 - r4} @ save sp_SVC, lr_SVC, pc, cpsr, old_ro
1045 mrs r9, cpsr @ Enable interrupts if they were
1047 biceq r9, r9, #PSR_I_BIT @ previously
1049 mov r0, r2 @ address (pc)
1051 bl do_PrefetchAbort @ call abort handler
1053 ldr r0, [sp, #S_PSR]
1055 ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
@ Literal pool: pc-relative pointers used by the SVC/USR handlers above
@ (per-mode temp save areas, the processor vector, the FP entry hook, and
@ irq_stat for the preemption path).
1058 .LCirq: .word __temp_irq
1059 .LCund: .word __temp_und
1060 .LCabt: .word __temp_abt
1062 .LCprocfns: .word processor
1064 .LCfp: .word fp_enter
1065 #ifdef CONFIG_PREEMPT
1066 .LCirq_stat: .word irq_stat
1072 * User mode handlers
@ Data abort from user mode: build a pt_regs frame from the temp save
@ area, run the CPU-specific (or CPU_ABORT_HANDLER) decode, re-enable
@ IRQs, and return through ret_from_exception.  NOTE(review): the
@ do_DataAbort call falls in the numbering gaps.
1075 __dabt_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1076 stmia sp, {r0 - r12} @ save r0 - r12
1079 ldmia r7, {r2 - r4} @ Get USR pc, cpsr
1080 stmia r5, {r2 - r4} @ Save USR pc, cpsr, old_r0
1082 alignment_trap r7, r7, __temp_abt
@ Two abort-decode variants are visible; the #ifdef choosing between the
@ processor-vector call and CPU_ABORT_HANDLER is in the missing lines.
1085 ldr r4, .LCprocfns @ pass r2, r3 to
1086 mov lr, pc @ processor code
1087 ldr pc, [r4] @ call processor specific code
1089 bl CPU_ABORT_HANDLER
1091 enable_irq r2 @ Enable interrupts
1093 adrsvc al, lr, ret_from_exception
@ IRQ from user mode: save frame, bump preempt count, dispatch pending
@ IRQs via get_irqnr_and_base, restore preempt count, and (in missing
@ lines) return via ret_to_user.
1097 __irq_usr: sub sp, sp, #S_FRAME_SIZE
1098 stmia sp, {r0 - r12} @ save r0 - r12
1101 ldmia r4, {r5 - r7} @ get saved PC, SPSR
1102 stmia r8, {r5 - r7} @ save pc, psr, old_r0
1104 alignment_trap r4, r7, __temp_irq
1106 #ifdef CONFIG_PREEMPT
1108 ldr r9, [r8, #TI_PREEMPT] @ get preempt count
1109 add r7, r9, #1 @ increment it
1110 str r7, [r8, #TI_PREEMPT]
1112 1: get_irqnr_and_base r0, r6, r5, lr
1116 @ routine called with r0 = irq number, r1 = struct pt_regs *
1119 #ifdef CONFIG_PREEMPT
1120 ldr r0, [r8, #TI_PREEMPT]
1122 str r9, [r8, #TI_PREEMPT]
@ Undefined instruction from user mode: save the frame (including user
@ sp/lr via the ^ form), fetch the faulting instruction with ldrt (user
@ access, with an exception-table fixup), and offer it to call_fpe.
@ Thumb-mode faults bypass the FP path entirely.
1134 __und_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1135 stmia sp, {r0 - r12} @ Save r0 - r12
1139 stmia r8, {r5 - r7} @ Save USR pc, cpsr, old_r0
1140 stmdb r8, {sp, lr}^ @ Save user sp, lr
1141 alignment_trap r4, r7, __temp_und
1143 tst r6, #PSR_T_BIT @ Thumb mode?
1144 bne fpundefinstr @ ignore FP
1146 1: ldrt r0, [r4] @ r0 = instruction
1147 adrsvc al, r9, ret_from_exception @ r9 = normal FP return
1148 adrsvc al, lr, fpundefinstr @ lr = undefined instr return
1151 * The out of line fixup for the ldrt above.
1153 .section .fixup, "ax"
1156 .section __ex_table,"a"
1163 * Check whether the instruction is a co-processor instruction.
1164 * If yes, we need to call the relevant co-processor handler.
1166 * Note that we don't do a full check here for the co-processor
1167 * instructions; all instructions with bit 27 set are well
1168 * defined. The only instructions that should fault are the
1169 * co-processor instructions. However, we have to watch out
1170 * for the ARM6/ARM7 SWI bug.
1172 * Emulators may wish to make use of the following registers:
1173 * r0 - instruction opcode.
1174 * r10 - this threads thread_info structure.
@ Dispatch: extract the CP number (bits 8..11), mark it used in
@ thread_info->used_cp[], then jump into a 16-entry branch table via
@ "add pc, pc, r8, lsr #6" (each entry is one 4-byte instruction).
@ NOTE(review): several table entries and the mov r7,#1 setup are in the
@ numbering gaps.
1176 call_fpe: enable_irq r10 @ Enable interrupts
1177 tst r0, #0x08000000 @ only CDP/CPRT/LDC/STC have bit 27
1178 #if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
1179 and r8, r0, #0x0f000000 @ mask out op-code bits
1180 teqne r8, #0x0f000000 @ SWI (ARM6/7 bug)?
1183 get_thread_info r10 @ get current thread
1184 and r8, r0, #0x00000f00 @ mask out CP number
1186 add r6, r10, #TI_USED_CP
1187 strb r7, [r6, r8, lsr #8] @ set appropriate used_cp[]
1188 add pc, pc, r8, lsr #6 @ computed jump into the table below
1192 b do_fpe @ CP#1 (FPE)
1193 b do_fpe @ CP#2 (FPE)
1201 mov pc, lr @ CP#10 (VFP)
1202 mov pc, lr @ CP#11 (VFP)
1205 mov pc, lr @ CP#14 (Debug)
1206 mov pc, lr @ CP#15 (Control)
@ do_fpe: tail into the FP emulator through the fp_enter hook, with r10
@ pointing at this thread's FP state workspace.
1208 do_fpe: ldr r4, .LCfp
1209 add r10, r10, #TI_FPSTATE @ r10 = workspace
1210 ldr pc, [r4] @ Call FP module USR entry point
1213 * The FP module is called with these registers set:
1216 * r9 = normal "successful" return address
1217 * r10 = FP workspace
1218 * lr = unrecognised FP instruction return address
@ Fallback when no coprocessor claims the instruction: raise SIGILL via
@ do_undefinstr (call in missing lines) and return through the exception
@ return path.
1226 fpundefinstr: mov r0, sp
1227 adrsvc al, lr, ret_from_exception
@ Prefetch abort from user mode: full frame incl. user sp/lr, then
@ do_PrefetchAbort on the faulting pc; falls into ret_from_exception.
1231 __pabt_usr: sub sp, sp, #S_FRAME_SIZE @ Allocate frame size in one go
1232 stmia sp, {r0 - r12} @ Save r0 - r12
1235 ldmia r4, {r5 - r7} @ Get USR pc, cpsr
1236 stmia r8, {r5 - r7} @ Save USR pc, cpsr, old_r0
1237 stmdb r8, {sp, lr}^ @ Save sp_usr lr_usr
1238 alignment_trap r4, r7, __temp_abt
1240 enable_irq r0 @ Enable interrupts
1241 mov r0, r5 @ address (pc)
1243 bl do_PrefetchAbort @ call abort handler
1246 * This is the return code to user mode for abort handlers
@ Body of ret_from_exception is not visible in this chunk.
1248 ENTRY(ret_from_exception)
1254 * Register switch for ARMv3 and ARMv4 processors
1255 * r0 = previous task_struct, r1 = previous thread_info, r2 = next thread_info
1256 * previous and next are guaranteed not to be the same.
@ Save callee-saved regs + sp/lr into prev's TI_CPU_SAVE area, install the
@ next task's domain register, then reload the same set (incl. pc) from
@ next's save area -- the ldmib resumes execution in the new task.
1259 add ip, r1, #TI_CPU_SAVE
1260 ldr r3, [r2, #TI_CPU_DOMAIN]! @ r2 now points at TI_CPU_DOMAIN
1261 stmia ip, {r4 - sl, fp, sp, lr} @ Store most regs on stack
1262 mcr p15, 0, r3, c3, c0, 0 @ Set domain register
1263 ldmib r2, {r4 - sl, fp, sp, pc} @ Load all regs saved previously
1267 * Vector stubs. NOTE that we only align 'vector_IRQ' to a cache line boundary,
1268 * and we rely on each stub being exactly 48 (1.5 cache lines) in size. This
1269 * means that we only ever load two cache lines for this code, or one if we're
1270 * lucky. We also copy this code to 0x200 so that we can use branches in the
1271 * vectors, rather than ldr's.
1276 * Interrupt dispatcher
1277 * Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
@ Stub: stash lr_IRQ/spsr_IRQ in __temp_irq, force spsr to SVC mode, then
@ use the interrupted mode (low spsr bits, extraction in missing lines) to
@ index .LCtab_irq; "movs pc, lr" both branches and switches to SVC.
1280 @ save mode specific registers
1284 str lr, [r13] @ save lr_IRQ
1286 str lr, [r13, #4] @ save spsr_IRQ
1288 @ now branch to the relevant MODE handling routine
1291 bic r13, r13, #MODE_MASK
1292 orr r13, r13, #MODE_SVC
1293 msr spsr, r13 @ switch to SVC_32 mode
1296 ldr lr, [pc, lr, lsl #2] @ lr = handler from table below
1297 movs pc, lr @ Changes mode and branches
@ 16-entry dispatch table indexed by the interrupted CPU mode.
1299 .LCtab_irq: .word __irq_usr @ 0 (USR_26 / USR_32)
1300 .word __irq_invalid @ 1 (FIQ_26 / FIQ_32)
1301 .word __irq_invalid @ 2 (IRQ_26 / IRQ_32)
1302 .word __irq_svc @ 3 (SVC_26 / SVC_32)
1303 .word __irq_invalid @ 4
1304 .word __irq_invalid @ 5
1305 .word __irq_invalid @ 6
1306 .word __irq_invalid @ 7
1307 .word __irq_invalid @ 8
1308 .word __irq_invalid @ 9
1309 .word __irq_invalid @ a
1310 .word __irq_invalid @ b
1311 .word __irq_invalid @ c
1312 .word __irq_invalid @ d
1313 .word __irq_invalid @ e
1314 .word __irq_invalid @ f
1319 * Data abort dispatcher - dispatches it to the correct handler for the processor mode
1320 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
@ Same stub pattern as vector_IRQ, dispatching through .LCtab_dabt.
@ NOTE(review): the lr/spsr save lines for ABT mode are in the gaps.
1323 @ save mode specific registers
1331 @ now branch to the relevant MODE handling routine
1334 bic r13, r13, #MODE_MASK
1335 orr r13, r13, #MODE_SVC
1336 msr spsr, r13 @ switch to SVC_32 mode
1339 ldr lr, [pc, lr, lsl #2]
1340 movs pc, lr @ Changes mode and branches
1342 .LCtab_dabt: .word __dabt_usr @ 0 (USR_26 / USR_32)
1343 .word __dabt_invalid @ 1 (FIQ_26 / FIQ_32)
1344 .word __dabt_invalid @ 2 (IRQ_26 / IRQ_32)
1345 .word __dabt_svc @ 3 (SVC_26 / SVC_32)
1346 .word __dabt_invalid @ 4
1347 .word __dabt_invalid @ 5
1348 .word __dabt_invalid @ 6
1349 .word __dabt_invalid @ 7
1350 .word __dabt_invalid @ 8
1351 .word __dabt_invalid @ 9
1352 .word __dabt_invalid @ a
1353 .word __dabt_invalid @ b
1354 .word __dabt_invalid @ c
1355 .word __dabt_invalid @ d
1356 .word __dabt_invalid @ e
1357 .word __dabt_invalid @ f
1362 * Prefetch abort dispatcher - dispatches it to the correct handler for the processor mode
1363 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
@ Same stub pattern, dispatching through .LCtab_pabt.
1367 @ save mode specific registers
1371 str lr, [r13] @ save lr_ABT
1373 str lr, [r13, #4] @ save spsr_ABT
1375 @ now branch to the relevant MODE handling routine
1378 bic r13, r13, #MODE_MASK
1379 orr r13, r13, #MODE_SVC
1380 msr spsr, r13 @ switch to SVC_32 mode
1383 ldr lr, [pc, lr, lsl #2]
1386 .LCtab_pabt: .word __pabt_usr @ 0 (USR_26 / USR_32)
1387 .word __pabt_invalid @ 1 (FIQ_26 / FIQ_32)
1388 .word __pabt_invalid @ 2 (IRQ_26 / IRQ_32)
1389 .word __pabt_svc @ 3 (SVC_26 / SVC_32)
1390 .word __pabt_invalid @ 4
1391 .word __pabt_invalid @ 5
1392 .word __pabt_invalid @ 6
1393 .word __pabt_invalid @ 7
1394 .word __pabt_invalid @ 8
1395 .word __pabt_invalid @ 9
1396 .word __pabt_invalid @ a
1397 .word __pabt_invalid @ b
1398 .word __pabt_invalid @ c
1399 .word __pabt_invalid @ d
1400 .word __pabt_invalid @ e
1401 .word __pabt_invalid @ f
1406 * Undef instr entry dispatcher - dispatches it to the correct handler for the processor mode
1407 * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
@ Same stub pattern, saving into __temp_und and dispatching via .LCtab_und.
1411 @ save mode specific registers
1414 str lr, [r13] @ save lr_UND
1416 str lr, [r13, #4] @ save spsr_UND
1418 @ now branch to the relevant MODE handling routine
1421 bic r13, r13, #MODE_MASK
1422 orr r13, r13, #MODE_SVC
1423 msr spsr, r13 @ switch to SVC_32 mode
1426 ldr lr, [pc, lr, lsl #2]
1427 movs pc, lr @ Changes mode and branches
1429 .LCtab_und: .word __und_usr @ 0 (USR_26 / USR_32)
1430 .word __und_invalid @ 1 (FIQ_26 / FIQ_32)
1431 .word __und_invalid @ 2 (IRQ_26 / IRQ_32)
1432 .word __und_svc @ 3 (SVC_26 / SVC_32)
1433 .word __und_invalid @ 4
1434 .word __und_invalid @ 5
1435 .word __und_invalid @ 6
1436 .word __und_invalid @ 7
1437 .word __und_invalid @ 8
1438 .word __und_invalid @ 9
1439 .word __und_invalid @ a
1440 .word __und_invalid @ b
1441 .word __und_invalid @ c
1442 .word __und_invalid @ d
1443 .word __und_invalid @ e
1444 .word __und_invalid @ f
1448 /*=============================================================================
1450 *-----------------------------------------------------------------------------
1451 * Enter in FIQ mode, spsr = ANY CPSR, lr = ANY PC
1452 * MUST PRESERVE SVC SPSR, but need to switch to SVC mode to show our msg.
1453 * Basically to switch modes, we *HAVE* to clobber one register... brain
1454 * damage alert! I don't think that we can execute any code in here in any
1455 * other mode than FIQ... Ok you can switch to another mode, but you can't
1456 * get out of that mode without clobbering one register.
@ The kernel does not use FIQ here: the vector simply masks further FIQs.
1458 vector_FIQ: disable_fiq
1461 /*=============================================================================
1462 * Address exception handler
1463 *-----------------------------------------------------------------------------
1464 * These aren't too critical.
1465 * (they're not supposed to happen, and won't happen in 32-bit data mode).
1472 * We group all the following data together to optimise
1473 * for CPUs with separate I & D caches.
1477 .LCvswi: .word vector_swi
1479 .LCsirq: .word __temp_irq
1480 .LCsund: .word __temp_und
1481 .LCsabt: .word __temp_abt
@ The stubs are copied to run at .LCvectors+0x200; each vector entry
@ branches to its stub at that relocated address.
1485 .equ __real_stubs_start, .LCvectors + 0x200
@ Hardware vector table image: reset slot carries the SYS_ERROR0 swi.
1487 .LCvectors: swi SYS_ERROR0
1488 b __real_stubs_start + (vector_undefinstr - __stubs_start)
1489 ldr pc, __real_stubs_start + (.LCvswi - __stubs_start)
1490 b __real_stubs_start + (vector_prefetch - __stubs_start)
1491 b __real_stubs_start + (vector_data - __stubs_start)
1492 b __real_stubs_start + (vector_addrexcptn - __stubs_start)
1493 b __real_stubs_start + (vector_IRQ - __stubs_start)
1494 b __real_stubs_start + (vector_FIQ - __stubs_start)
@ Boot-time init: copy the 8-word vector image to the vector page, then
@ copy the stubs to +0x200 (copy loop is in the missing lines).
1497 stmfd sp!, {r4 - r6, lr}
1499 adr r1, .LCvectors @ set up the vectors
1500 ldmia r1, {r1, r2, r3, r4, r5, r6, ip, lr}
1501 stmia r0, {r1, r2, r3, r4, r5, r6, ip, lr}
1504 adr r0, __stubs_start @ copy stubs to 0x200
1510 LOADREGS(fd, sp!, {r4 - r6, pc})
1515 * Do not reorder these, and do not insert extra data between...
@ Per-exception-mode scratch: each area holds {saved lr, saved spsr} and
@ is addressed via the .LCirq/.LCund/.LCabt literals.  Layout (ordering
@ and adjacency) is relied upon by the vector stubs -- hence the warning.
1518 __temp_irq: .word 0 @ saved lr_irq
1519 .word 0 @ saved spsr_irq
1521 __temp_und: .word 0 @ Saved lr_und
1522 .word 0 @ Saved spsr_und
1524 __temp_abt: .word 0 @ Saved lr_abt
1525 .word 0 @ Saved spsr_abt
1529 .globl cr_no_alignment