linux 2.6.16.38 w/ vs2.0.3-rc1
diff --git a/include/asm-x86_64/system.h b/include/asm-x86_64/system.h
index 6bf170b..dacec59 100644
@@ -1,12 +1,18 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
 
+#include <linux/config.h>
 #include <linux/kernel.h>
 #include <asm/segment.h>
-#include <asm/alternative.h>
 
 #ifdef __KERNEL__
 
+#ifdef CONFIG_SMP
+#define LOCK_PREFIX "lock ; "
+#else
+#define LOCK_PREFIX ""
+#endif
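/*
 * Editorial sketch (not part of this diff): LOCK_PREFIX is pasted in
 * front of read-modify-write instructions so UP kernels skip the bus
 * lock entirely.  This mirrors the era's atomic helpers in
 * <asm/atomic.h>; the function name here is illustrative only.
 */
static inline void atomic_inc_sketch(volatile int *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"	/* "lock ; incl" on SMP, plain incl on UP */
		: "=m" (*v)
		: "m" (*v));
}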
+
 #define __STR(x) #x
 #define STR(x) __STR(x)
 
 #define __RESTORE(reg,offset) "movq (14-" #offset ")*8(%%rsp),%%" #reg "\n\t"
 
 /* frame pointer must be last for get_wchan */
-#define SAVE_CONTEXT    "pushq %%rbp ; movq %%rsi,%%rbp\n\t"
-#define RESTORE_CONTEXT "movq %%rbp,%%rsi ; popq %%rbp\n\t"
+#define SAVE_CONTEXT    "pushf ; pushq %%rbp ; movq %%rsi,%%rbp\n\t"
+#define RESTORE_CONTEXT "movq %%rbp,%%rsi ; popq %%rbp ; popf\n\t"
 
 #define __EXTRA_CLOBBER  \
        ,"rcx","rbx","rdx","r8","r9","r10","r11","r12","r13","r14","r15"
 
+/* Save and restore flags so a leaked NT flag is cleared across the context switch */
 #define switch_to(prev,next,last) \
        asm volatile(SAVE_CONTEXT                                                   \
                     "movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */       \
@@ -29,7 +36,7 @@
                     "thread_return:\n\t"                                           \
                     "movq %%gs:%P[pda_pcurrent],%%rsi\n\t"                       \
                     "movq %P[thread_info](%%rsi),%%r8\n\t"                       \
-                    LOCK_PREFIX "btr  %[tif_fork],%P[ti_flags](%%r8)\n\t"        \
+                    LOCK "btr  %[tif_fork],%P[ti_flags](%%r8)\n\t"               \
                     "movq %%rax,%%rdi\n\t"                                       \
                     "jc   ret_from_fork\n\t"                                     \
                     RESTORE_CONTEXT                                                \
@@ -64,11 +71,110 @@ extern void load_gs_index(unsigned);
                ".previous"                     \
                : :"r" (value), "r" (0))
 
+#define set_debug(value,register) \
+       __asm__("movq %0,%%db" #register \
+               : /* no output */ \
+               : "r" ((unsigned long) value))
+
+
+#ifdef __KERNEL__
+struct alt_instr { 
+       __u8 *instr;            /* original instruction */
+       __u8 *replacement;
+       __u8  cpuid;            /* cpuid bit set for replacement */
+       __u8  instrlen;         /* length of original instruction */
+       __u8  replacementlen;   /* length of new instruction, <= instrlen */ 
+       __u8  pad[5];
+}; 
+#endif
+
+/*
+ * Alternative instructions for different CPU types or capabilities.
+ *
+ * This allows the use of optimized instructions even in generic binary
+ * kernels.
+ *
+ * The length of oldinstr must be greater than or equal to the length
+ * of newinstr; oldinstr can be padded with nops as needed.
+ *
+ * For non-barrier-like inlines, please define new variants
+ * without volatile and the memory clobber.
+ */
+#define alternative(oldinstr, newinstr, feature)       \
+       asm volatile ("661:\n\t" oldinstr "\n662:\n"                 \
+                     ".section .altinstructions,\"a\"\n"            \
+                     "  .align 8\n"                                   \
+                     "  .quad 661b\n"            /* label */          \
+                     "  .quad 663f\n"            /* new instruction */ \
+                     "  .byte %c0\n"             /* feature bit */    \
+                     "  .byte 662b-661b\n"       /* sourcelen */      \
+                     "  .byte 664f-663f\n"       /* replacementlen */ \
+                     ".previous\n"                                     \
+                     ".section .altinstr_replacement,\"ax\"\n"         \
+                     "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
+                     ".previous" :: "i" (feature) : "memory")  
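/*
 * Editorial sketch (not part of this diff), modeled on the i386 mb()
 * of the same era: patch a lock-prefixed add (6 bytes) with the shorter
 * mfence (3 bytes) on CPUs advertising X86_FEATURE_XMM2.  Every x86-64
 * CPU has SSE2, so on this architecture it is purely illustrative.
 */
#define mb_sketch() \
	alternative("lock; addl $0,0(%%rsp)", "mfence", X86_FEATURE_XMM2)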
+
+/*
+ * Alternative inline assembly with input.
+ *
+ * Peculiarities:
+ * There is no memory clobber here.
+ * Argument numbers start with 1 (%0 is the feature bit).
+ * It is best to use fixed-size constraints (like "r", referenced as (%1)).
+ * If you use variable-sized constraints like "m" or "g" in the
+ * replacement, make sure to pad to the worst-case length.
+ */
+#define alternative_input(oldinstr, newinstr, feature, input...)       \
+       asm volatile ("661:\n\t" oldinstr "\n662:\n"                    \
+                     ".section .altinstructions,\"a\"\n"               \
+                     "  .align 8\n"                                    \
+                     "  .quad 661b\n"            /* label */           \
+                     "  .quad 663f\n"            /* new instruction */ \
+                     "  .byte %c0\n"             /* feature bit */     \
+                     "  .byte 662b-661b\n"       /* sourcelen */       \
+                     "  .byte 664f-663f\n"       /* replacementlen */  \
+                     ".previous\n"                                     \
+                     ".section .altinstr_replacement,\"ax\"\n"         \
+                     "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
+                     ".previous" :: "i" (feature), ##input)
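/*
 * Editorial sketch modeled on the i386 prefetch() of this era (not part
 * of this diff): ASM_NOP4 comes from <asm/processor.h>.  The "r"
 * constraint keeps both sides at a fixed encoding size, as the comment
 * above recommends, and the 4-byte nop leaves room for the 3-4 byte
 * prefetchnta.  Note the input really is %1, since %0 is the feature bit.
 */
static inline void prefetch_nta_sketch(const void *x)
{
	alternative_input(ASM_NOP4,
			  "prefetchnta (%1)",
			  X86_FEATURE_XMM,
			  "r" (x));
}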
+
+/* Like alternative_input, but with a single output argument */
+#define alternative_io(oldinstr, newinstr, feature, output, input...) \
+       asm volatile ("661:\n\t" oldinstr "\n662:\n"                    \
+                     ".section .altinstructions,\"a\"\n"               \
+                     "  .align 8\n"                                    \
+                     "  .quad 661b\n"            /* label */           \
+                     "  .quad 663f\n"            /* new instruction */ \
+                     "  .byte %c[feat]\n"        /* feature bit */     \
+                     "  .byte 662b-661b\n"       /* sourcelen */       \
+                     "  .byte 664f-663f\n"       /* replacementlen */  \
+                     ".previous\n"                                     \
+                     ".section .altinstr_replacement,\"ax\"\n"         \
+                     "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
+                     ".previous" : output : [feat] "i" (feature), ##input)
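/*
 * Editorial sketch (not part of this diff): with alternative_io() the
 * output is %0 and [feat] is %1, so inputs start at %2.  Here a plain
 * load is padded with a 3-byte nop so an lfence (also 3 bytes) can be
 * patched in on SSE2-capable CPUs; contrived, but it shows the operand
 * numbering and the pad-to-worst-case rule in one place.
 */
static inline unsigned long load_fenced_sketch(unsigned long *p)
{
	unsigned long v;
	alternative_io("movq %2,%0 ; " ASM_NOP3,
		       "movq %2,%0 ; lfence",
		       X86_FEATURE_XMM2,
		       "=r" (v), "m" (*p));
	return v;
}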
+
 /*
  * Clear and set 'TS' bit respectively
  */
 #define clts() __asm__ __volatile__ ("clts")
 
+static inline unsigned long __raw_local_save_flags(void)
+{
+       unsigned long flags;
+
+       __asm__ __volatile__(
+               "# __raw_save_flags\n\t"
+               "pushfq ; popq %q0"
+               : "=g" (flags)
+               : /* no input */
+               : "memory"
+       );
+
+       return flags;
+}
+#define raw_local_save_flags(flags) \
+               do { (flags) = __raw_local_save_flags(); } while (0)
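/*
 * Editorial sketch (not part of this diff): the saved flags word can be
 * tested directly, e.g. for IF (bit 9), which is exactly how the
 * non-VSMP irqs_disabled() further down is built.
 */
static inline int irqs_disabled_sketch(void)
{
	return !(__raw_local_save_flags() & (1 << 9));	/* IF clear? */
}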
+
 static inline unsigned long read_cr0(void)
 { 
        unsigned long cr0;
@@ -240,14 +346,50 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 #endif
 #define read_barrier_depends() do {} while(0)
 #define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
+#define set_wmb(var, value) do { var = value; wmb(); } while (0)
 
 #define warn_if_not_ulong(x) do { unsigned long foo; (void) (&(x) == &foo); } while (0)
 
-#include <linux/irqflags.h>
+/* interrupt control.. */
+#define local_save_flags(x)    do { warn_if_not_ulong(x); __asm__ __volatile__("# save_flags \n\t pushfq ; popq %q0":"=g" (x): /* no input */ :"memory"); } while (0)
+#define local_irq_restore(x)   __asm__ __volatile__("# restore_flags \n\t pushq %0 ; popfq": /* no output */ :"g" (x):"memory", "cc")
+
+#ifdef CONFIG_X86_VSMP
+/* Interrupt control for the VSMP architecture */
+#define local_irq_disable()    do { unsigned long flags; local_save_flags(flags); local_irq_restore((flags & ~(1 << 9)) | (1 << 18)); } while (0)
+#define local_irq_enable()     do { unsigned long flags; local_save_flags(flags); local_irq_restore((flags | (1 << 9)) & ~(1 << 18)); } while (0)
+
+#define irqs_disabled()                                        \
+({                                                     \
+       unsigned long flags;                            \
+       local_save_flags(flags);                        \
+       (flags & (1<<18)) || !(flags & (1<<9));         \
+})
+
+/* For spinlocks etc */
+#define local_irq_save(x)      do { local_save_flags(x); local_irq_restore((x & ~(1 << 9)) | (1 << 18)); } while (0)
+#else  /* CONFIG_X86_VSMP */
+#define local_irq_disable()    __asm__ __volatile__("cli": : :"memory")
+#define local_irq_enable()     __asm__ __volatile__("sti": : :"memory")
+
+#define irqs_disabled()                        \
+({                                     \
+       unsigned long flags;            \
+       local_save_flags(flags);        \
+       !(flags & (1<<9));              \
+})
+
+/* For spinlocks etc */
+#define local_irq_save(x)      do { warn_if_not_ulong(x); __asm__ __volatile__("# local_irq_save \n\t pushfq ; popq %0 ; cli":"=g" (x): /* no input */ :"memory"); } while (0)
+#endif
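/*
 * Editorial sketch (not part of this diff): the usual pattern built on
 * the primitives above.  It works unchanged on both variants, since
 * VSMP encodes "interrupts off" in AC (bit 18) rather than IF (bit 9)
 * but keeps the same save/restore contract.
 */
static inline void critical_section_sketch(void)
{
	unsigned long flags;

	local_irq_save(flags);		/* remember state, then disable */
	/* ... touch per-CPU state safely here ... */
	local_irq_restore(flags);	/* put back whatever the caller had */
}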
+
+/* used in the idle loop; sti enables interrupts only after the next instruction, so no interrupt can slip in before the hlt */
+#define safe_halt()            __asm__ __volatile__("sti; hlt": : :"memory")
+/* used when interrupts are already enabled or to shutdown the processor */
+#define halt()                 __asm__ __volatile__("hlt": : :"memory")
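/*
 * Editorial sketch modeled on the era's default_idle() (not part of
 * this diff; need_resched() is from <linux/sched.h>): because sti takes
 * effect one instruction late, "sti; hlt" closes the race between the
 * need_resched() check and actually going to sleep.
 */
static inline void idle_step_sketch(void)
{
	local_irq_disable();
	if (!need_resched())
		safe_halt();		/* enable interrupts and halt atomically */
	else
		local_irq_enable();
}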
 
 void cpu_idle_wait(void);
 
 extern unsigned long arch_align_stack(unsigned long sp);
-extern void free_init_pages(char *what, unsigned long begin, unsigned long end);
 
 #endif