.macro irq_prio_table
.endm
-#elif defined(CONFIG_ARCH_IOP310)
-
- .macro disable_fiq
- .endm
-
- .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
- mrc p13, 0, \irqstat, c4, c0, 0 @ get INTSRC
- mrc p13, 0, \base, c0, c0, 0 @ get INTCTL
-
- tst \irqstat, #(1<<29) @ if INTSRC_BI
- tstne \base, #(1<<3) @ and INTCTL_BM
- movne \irqnr, #IRQ_XS80200_BCU
- bne 1001f
-
- tst \irqstat, #(1<<28) @ if INTSRC_PI
- tstne \base, #(1<<2) @ and INTCTL_PM
- movne \irqnr, #IRQ_XS80200_PMU
- bne 1001f
-
- tst \irqstat, #(1<<31) @ if INTSRC_FI
- tstne \base, #(1<<0) @ and INTCTL_FM
- movne \irqnr, #IRQ_XS80200_EXTFIQ
- bne 1001f
-
- tst \irqstat, #(1<<30) @ if INTSRC_II
- tstne \base, #(1<<1) @ and INTCTL_IM
- movne \irqnr, #IRQ_XS80200_EXTIRQ
-
-1001:
- .endm
-
- .macro irq_prio_table
- .endm
-
#elif defined(CONFIG_ARCH_IOP321)
.macro disable_fiq
.endm
.macro irq_prio_table
.endm
+#elif defined(CONFIG_ARCH_IOP331)
+ .macro disable_fiq
+ .endm
+
+ /*
+ * Note: only deal with normal interrupts, not FIQ
+ */
+ .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
+ mov \irqnr, #0 @ default: no IRQ pending
+ mrc p6, 0, \irqstat, c4, c0, 0 @ Read IINTSRC0
+ cmp \irqstat, #0 @ any source pending in bank 0?
+ bne 1002f @ yes: decode IINTSRC0 below
+ mrc p6, 0, \irqstat, c5, c0, 0 @ Read IINTSRC1
+ cmp \irqstat, #0 @ any source pending in bank 1?
+ beq 1001f @ no: exit with \irqnr = 0
+ clz \irqnr, \irqstat @ leading zeros of IINTSRC1
+/* equivalent two-instruction form, kept for reference:
+ * mov \base, #31
+ * subs \irqnr,\base,\irqnr
+ */
+ rsbs \irqnr,\irqnr,#31 @ recommended by RMK: \irqnr = 31 - clz (highest set bit)
+ add \irqnr,\irqnr,#IRQ_IOP331_XINT8 @ bias into the bank-1 IRQ number range
+ b 1001f
+1002: clz \irqnr, \irqstat @ leading zeros of IINTSRC0
+ mov \base, #31
+ subs \irqnr,\base,\irqnr @ \irqnr = 31 - clz (highest set bit)
+ add \irqnr,\irqnr,#IRQ_IOP331_DMA0_EOT @ bias into the bank-0 IRQ number range
+1001:
+ .endm
+
+ .macro irq_prio_table
+ .endm
+
#elif defined(CONFIG_ARCH_PXA)
.macro disable_fiq
.macro irq_prio_table
.endm
+#elif defined(CONFIG_ARCH_IXP2000)
+
+ .macro disable_fiq
+ .endm
+
+ .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
+
+ mov \irqnr, #0x0 @ default: no IRQ pending
+ mov \base, #0xfe000000 @ build status reg address in pieces
+ orr \base, \base, #0x00ff0000 @ (ARM immediates are 8-bit rotated)
+ orr \base, \base, #0x0000a000
+ orr \base, \base, #0x08 @ \base = 0xfeffa008
+ ldr \irqstat, [\base] @ get interrupts
+ mov \tmp, #IXP2000_VALID_IRQ_MASK & 0xff000000 @ build mask one byte at a time
+ orr \tmp, \tmp, #IXP2000_VALID_IRQ_MASK & 0x00ff0000
+ orr \tmp, \tmp, #IXP2000_VALID_IRQ_MASK & 0x0000ff00
+ orr \tmp, \tmp, #IXP2000_VALID_IRQ_MASK & 0x000000ff
+ and \irqstat, \irqstat, \tmp @ keep only valid IRQ sources
+
+ cmp \irqstat, #0 @ anything pending?
+ beq 1001f @ no: exit with \irqnr = 0
+
+ clz \irqnr, \irqstat @ leading zeros of status word
+ mov \base, #31
+ subs \irqnr, \base, \irqnr @ \irqnr = 31 - clz (highest set bit)
+
+ /*
+ * We handle PCIA and PCIB here so we don't have an
+ * extra layer of code just to check these two bits.
+ */
+ cmp \irqnr, #IRQ_IXP2000_PCI
+ bne 1001f @ not the shared PCI IRQ: done
+
+ mov \base, #0xfe000000 @ build second status address in pieces
+ orr \base, \base, #0x00fd0000
+ orr \base, \base, #0x0000e100
+ orr \base, \base, #0x00000058 @ \base = 0xfefde158
+ ldr \irqstat, [\base] @ read PCI int status (assumed; confirm vs IXP2000 TRM)
+
+ mov \tmp, #(1<<26)
+ tst \irqstat, \tmp @ PCIA pending?
+ movne \irqnr, #IRQ_IXP2000_PCIA
+ bne 1001f
+
+ mov \tmp, #(1<<27)
+ tst \irqstat, \tmp @ PCIB pending?
+ movne \irqnr, #IRQ_IXP2000_PCIB
+
+1001:
+ .endm
+
+ .macro irq_prio_table
+ .endm
+
#elif defined (CONFIG_ARCH_IXP4XX)
.macro disable_fiq
.macro irq_prio_table
.endm
+#elif defined(CONFIG_ARCH_IMX)
+
+ .macro disable_fiq
+ .endm
+#define AITC_NIVECSR 0x40 /* AITC normal interrupt vector/status register offset */
+ .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
+ ldr \irqstat, =IO_ADDRESS(IMX_AITC_BASE) @ interrupt controller base (mapped)
+ @ Load offset & priority of the highest priority
+ @ interrupt pending.
+ ldr \irqnr, [\irqstat, #AITC_NIVECSR]
+ @ Shift off the priority leaving the offset or
+ @ "interrupt number"
+ mov \irqnr, \irqnr, lsr #16
+ ldr \irqstat, =1 @ flag: assume an IRQ is pending
+ ldr \base, =0xFFFF // 0xFFFF = no/invalid interrupt
+ cmp \irqnr, \base
+ bne 1001f @ valid IRQ number: keep flag = 1
+ ldr \irqstat, =0 @ nothing pending: clear flag
+1001:
+ tst \irqstat, #1 @ set NE if an IRQ is pending, EQ if not
+ .endm
+
+ .macro irq_prio_table
+ .endm
+
+#elif defined(CONFIG_ARCH_H720X)
+
+ .macro disable_fiq
+ .endm
+
+ .macro get_irqnr_and_base, irqnr, irqstat, base, tmp
+#if defined (CONFIG_CPU_H7201) || defined (CONFIG_CPU_H7202)
+ @ we could use the id register on H7202, but this is not
+ @ properly updated when we come back from asm_do_irq
+ @ without a previous return from interrupt
+ @ (see loops below in irq_svc, irq_usr)
+ @ We see unmasked pending ints only, as the masked pending ints
+ @ are not visible here
+
+ mov \base, #0xf0000000 @ base register
+ orr \base, \base, #0x24000 @ irqbase
+ ldr \irqstat, [\base, #0x04] @ get interrupt status
+#if defined (CONFIG_CPU_H7201)
+ ldr \tmp, =0x001fffff @ H7201: low 21 bits are valid sources
+#else
+ mvn \tmp, #0xc0000000 @ H7202: all but the top two bits
+#endif
+ and \irqstat, \irqstat, \tmp @ mask out unused ints
+ mov \irqnr, #0 @ binary search for lowest set bit
+
+ mov \tmp, #0xff00
+ orr \tmp, \tmp, #0xff @ \tmp = 0xffff
+ tst \irqstat, \tmp @ any source in bits 0-15?
+ addeq \irqnr, \irqnr, #16 @ no: continue in bits 16-31
+ moveq \irqstat, \irqstat, lsr #16
+ tst \irqstat, #255 @ narrow to an 8-bit window
+ addeq \irqnr, \irqnr, #8
+ moveq \irqstat, \irqstat, lsr #8
+ tst \irqstat, #15 @ narrow to 4 bits
+ addeq \irqnr, \irqnr, #4
+ moveq \irqstat, \irqstat, lsr #4
+ tst \irqstat, #3 @ narrow to 2 bits
+ addeq \irqnr, \irqnr, #2
+ moveq \irqstat, \irqstat, lsr #2
+ tst \irqstat, #1 @ narrow to the final bit
+ addeq \irqnr, \irqnr, #1
+ moveq \irqstat, \irqstat, lsr #1
+ tst \irqstat, #1 @ NE if an IRQ was found (bit 0 set)
+ .endm
+
+ .macro irq_prio_table
+ .endm
+
+#else
+#error hynix processor selection mismatch
+#endif
#else
#error Unknown architecture
#endif
* r0 - instruction opcode.
* r10 - this threads thread_info structure.
*/
-call_fpe: enable_irq r10 @ Enable interrupts
+call_fpe:
tst r0, #0x08000000 @ only CDP/CPRT/LDC/STC have bit 27
#if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
and r8, r0, #0x0f000000 @ mask out op-code bits
mov r7, #1
add r6, r10, #TI_USED_CP
strb r7, [r6, r8, lsr #8] @ set appropriate used_cp[]
+#ifdef CONFIG_IWMMXT
+ @ Test if we need to give access to iWMMXt coprocessors
+ ldr r5, [r10, #TI_FLAGS]
+ rsbs r7, r8, #(1 << 8) @ CP 0 or 1 only
+ movcss r7, r5, lsr #(TIF_USING_IWMMXT + 1)
+ bcs iwmmxt_task_enable
+#endif
+ enable_irq r7
add pc, pc, r8, lsr #6
mov r0, r0
ENTRY(__switch_to)
add ip, r1, #TI_CPU_SAVE
ldr r3, [r2, #TI_CPU_DOMAIN]!
- stmia ip, {r4 - sl, fp, sp, lr} @ Store most regs on stack
+ stmia ip!, {r4 - sl, fp, sp, lr} @ Store most regs on stack
+#if defined(CONFIG_CPU_XSCALE) && !defined(CONFIG_IWMMXT)
+ mra r4, r5, acc0
+ stmia ip, {r4, r5}
+#endif
mcr p15, 0, r3, c3, c0, 0 @ Set domain register
#ifdef CONFIG_VFP
@ Always disable VFP so we can lazily save/restore the old
VFPFMRX r4, FPEXC
bic r4, r4, #FPEXC_ENABLE
VFPFMXR FPEXC, r4
+#endif
+#if defined(CONFIG_IWMMXT)
+ bl iwmmxt_task_switch
+#elif defined(CONFIG_CPU_XSCALE)
+ add r4, r2, #40 @ cpu_context_save->extra
+ ldmib r4, {r4, r5}
+ mar acc0, r4, r5
#endif
ldmib r2, {r4 - sl, fp, sp, pc} @ Load all regs saved previously