linux 2.6.16.38 w/ vs2.0.3-rc1
[linux-2.6.git] / include / asm-mips / stackframe.h
index 53a441c..a8919dc 100644 (file)
@@ -15,7 +15,7 @@
 
 #include <asm/asm.h>
 #include <asm/mipsregs.h>
-#include <asm/offset.h>
+#include <asm/asm-offsets.h>
 
                .macro  SAVE_AT
                .set    push
@@ -26,7 +26,7 @@
 
                .macro  SAVE_TEMP
                mfhi    v1
-#ifdef CONFIG_MIPS32
+#ifdef CONFIG_32BIT
                LONG_S  $8, PT_R8(sp)
                LONG_S  $9, PT_R9(sp)
 #endif
 
 #ifdef CONFIG_SMP
                /*
                 * get_saved_sp -- SMP variant: leave the current CPU's saved
                 * kernel stack pointer (kernelsp[]) in k1.
                 * NOTE(review): this hunk renames CONFIG_MIPS32/CONFIG_MIPS64
                 * to the newer CONFIG_32BIT/CONFIG_64BIT symbols and adds a
                 * separate path for 64-bit CONFIG_BUILD_ELF64 kernels, which
                 * must construct the full 64-bit address of kernelsp with
                 * %highest/%higher/%hi/%lo relocations.
                 */
                .macro  get_saved_sp    /* SMP variation */
-#ifdef CONFIG_MIPS32
+#ifdef CONFIG_32BIT
                /* CP0 Context >> 23 is used directly as this CPU's byte
                   offset into kernelsp[] -- TODO confirm against the code
                   that initializes CP0 Context per CPU. */
                mfc0    k0, CP0_CONTEXT
                lui     k1, %hi(kernelsp)
                srl     k0, k0, 23
-               sll     k0, k0, 2
                addu    k1, k0
                LONG_L  k1, %lo(kernelsp)(k1)
 #endif
-#ifdef CONFIG_MIPS64
+#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
                /* Non-ELF64 64-bit kernel: derive the per-CPU offset from
                   CP0 Context relative to pgd_current, then index kernelsp. */
                MFC0    k1, CP0_CONTEXT
                dsra    k1, 23
                lui     k0, %hi(pgd_current)
-               daddiu  k0, %lo(pgd_current)
+               addiu   k0, %lo(pgd_current)
                dsubu   k1, k0
                lui     k0, %hi(kernelsp)
                daddu   k1, k0
                LONG_L  k1, %lo(kernelsp)(k1)
+#endif
+#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
+               /* ELF64 kernel: build the full 64-bit kernelsp address
+                * piecewise (%highest/%higher/%hi shifted into place), add
+                * the CP0 Context >> 23 per-CPU offset, then load. */
+               MFC0    k1, CP0_CONTEXT
+               lui     k0, %highest(kernelsp)
+               dsrl    k1, 23
+               daddiu  k0, %higher(kernelsp)
+               dsll    k0, k0, 16
+               daddiu  k0, %hi(kernelsp)
+               dsll    k0, k0, 16
+               daddu   k1, k1, k0
+               LONG_L  k1, %lo(kernelsp)(k1)
 #endif
                .endm
 
                /*
                 * set_saved_sp stackp temp temp2 -- record \stackp as this
                 * CPU's saved kernel stack pointer in kernelsp[].
                 * NOTE(review): the hunk folds the per-config stores into a
                 * single shared "LONG_S \stackp, kernelsp(\temp)" and adds a
                 * CONFIG_BUILD_ELF64 index computation; after this change
                 * \temp2 is no longer used by any visible path (kept for
                 * caller compatibility, presumably).
                 */
                .macro  set_saved_sp stackp temp temp2
-#ifdef CONFIG_MIPS32
+#ifdef CONFIG_32BIT
                /* Same CP0 Context >> 23 offset as get_saved_sp uses. */
                mfc0    \temp, CP0_CONTEXT
                srl     \temp, 23
-               sll     \temp, 2
-               LONG_S  \stackp, kernelsp(\temp)
 #endif
-#ifdef CONFIG_MIPS64
+#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
                /* Non-ELF64 64-bit: index by cpu number * 8 (TI_CPU from
                   the thread_info reached via gp). */
                lw      \temp, TI_CPU(gp)
                dsll    \temp, 3
-               lui     \temp2, %hi(kernelsp)
-               daddu   \temp, \temp2
-               LONG_S  \stackp, %lo(kernelsp)(\temp)
 #endif
+#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
+               /* ELF64: CP0 Context >> 23, matching the SMP get_saved_sp. */
+               MFC0    \temp, CP0_CONTEXT
+               dsrl    \temp, 23
+#endif
+               LONG_S  \stackp, kernelsp(\temp)
                .endm
 #else
                /*
                 * get_saved_sp -- uniprocessor variant: load the single
                 * saved kernel stack pointer (kernelsp) into k1.
                 */
                .macro  get_saved_sp    /* Uniprocessor variation */
+#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
+               /* ELF64 kernel: kernelsp may lie outside the 32-bit
+                * sign-extended address range, so build its full 64-bit
+                * address with %highest/%higher/%hi before the %lo() load. */
+               lui     k1, %highest(kernelsp)
+               daddiu  k1, %higher(kernelsp)
+               dsll    k1, k1, 16
+               daddiu  k1, %hi(kernelsp)
+               dsll    k1, k1, 16
+#else
                lui     k1, %hi(kernelsp)
+#endif
                LONG_L  k1, %lo(kernelsp)(k1)
                .endm
 
 
                .macro  SAVE_SOME
                .set    push
+               .set    noat
                .set    reorder
                mfc0    k0, CP0_STATUS
                sll     k0, 3           /* extract cu0 bit */
                LONG_S  $6, PT_R6(sp)
                MFC0    v1, CP0_EPC
                LONG_S  $7, PT_R7(sp)
-#ifdef CONFIG_MIPS64
+#ifdef CONFIG_64BIT
                LONG_S  $8, PT_R8(sp)
                LONG_S  $9, PT_R9(sp)
 #endif
 
                .macro  RESTORE_TEMP
                LONG_L  $24, PT_LO(sp)
-#ifdef CONFIG_MIPS32
+#ifdef CONFIG_32BIT
                LONG_L  $8, PT_R8(sp)
                LONG_L  $9, PT_R9(sp)
 #endif
                LONG_L  $31, PT_R31(sp)
                LONG_L  $28, PT_R28(sp)
                LONG_L  $25, PT_R25(sp)
-#ifdef CONFIG_MIPS64
+#ifdef CONFIG_64BIT
                LONG_L  $8, PT_R8(sp)
                LONG_L  $9, PT_R9(sp)
 #endif
                LONG_L  $31, PT_R31(sp)
                LONG_L  $28, PT_R28(sp)
                LONG_L  $25, PT_R25(sp)
-#ifdef CONFIG_MIPS64
+#ifdef CONFIG_64BIT
                LONG_L  $8, PT_R8(sp)
                LONG_L  $9, PT_R9(sp)
 #endif
 
                /*
                 * RESTORE_ALL -- undo a full register save: temporaries,
                 * callee-saved (static) registers, $at, the remaining
                 * scratch set, and finally the stack pointer.
                 * NOTE(review): this hunk moves RESTORE_AT to after
                 * RESTORE_STATIC -- presumably so $at remains available as
                 * an assembler temporary for longer; confirm against the
                 * matching upstream commit.
                 */
                .macro  RESTORE_ALL
                RESTORE_TEMP
-               RESTORE_AT
                RESTORE_STATIC
+               RESTORE_AT
                RESTORE_SOME
                RESTORE_SP
                .endm
 
                /*
                 * RESTORE_ALL_AND_RET -- as RESTORE_ALL, but the final
                 * RESTORE_SP_AND_RET also returns from the exception.
                 * NOTE(review): same RESTORE_AT/RESTORE_STATIC reordering
                 * as the RESTORE_ALL hunk, kept consistent between the two.
                 */
                .macro  RESTORE_ALL_AND_RET
                RESTORE_TEMP
-               RESTORE_AT
                RESTORE_STATIC
+               RESTORE_AT
                RESTORE_SOME
                RESTORE_SP_AND_RET
                .endm