This commit was manufactured by cvs2svn to create branch 'vserver'.
[linux-2.6.git] / include / asm-sh64 / system.h
1 #ifndef __ASM_SH64_SYSTEM_H
2 #define __ASM_SH64_SYSTEM_H
3
4 /*
5  * This file is subject to the terms and conditions of the GNU General Public
6  * License.  See the file "COPYING" in the main directory of this archive
7  * for more details.
8  *
9  * include/asm-sh64/system.h
10  *
11  * Copyright (C) 2000, 2001  Paolo Alberelli
12  * Copyright (C) 2003  Paul Mundt
13  * Copyright (C) 2004  Richard Curnow
14  *
15  */
16
17 #include <linux/config.h>
18 #include <linux/kernel.h>
19 #include <asm/registers.h>
20 #include <asm/processor.h>
21
22 /*
23  *      switch_to() should switch tasks to task nr n, first
24  */
25
/*
 * Address-limit descriptor; holds a single segment value.
 * Presumably consumed by the get_fs()/set_fs() uaccess machinery —
 * that code is not visible in this header, so confirm against
 * asm-sh64/uaccess.h.
 */
typedef struct {
	unsigned long seg;
} mm_segment_t;
29
/*
 * Low-level context-switch primitive, implemented in assembly elsewhere.
 * Returns the task that was actually running before the switch.
 */
extern struct task_struct *sh64_switch_to(struct task_struct *prev,
                                          struct thread_struct *prev_thread,
                                          struct task_struct *next,
                                          struct thread_struct *next_thread);

/*
 * switch_to(prev, next, last): switch from 'prev' to 'next'; 'last'
 * receives the task we switched away from (sh64_switch_to's return).
 *
 * If 'next' is not the task that last used the FPU, SR_FD is set in its
 * saved status register before switching.  NOTE(review): this looks like
 * the usual lazy-FPU scheme — the FD bit makes next's first FPU
 * instruction trap so state can be restored on demand — but
 * last_task_used_math and SR_FD are defined outside this header; confirm
 * against the arch/sh64 FPU trap handler.
 */
#define switch_to(prev,next,last) \
	do {\
		if (last_task_used_math != next) {\
			struct pt_regs *regs = next->thread.uregs;\
			if (regs) regs->sr |= SR_FD;\
		}\
		last = sh64_switch_to(prev, &prev->thread, next, &next->thread);\
	} while(0)
43
#define nop() __asm__ __volatile__ ("nop")

/* Type-preserving exchange: dispatches on operand size via __xchg() below. */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

/* Test-and-set: write 1, return the previous value. */
#define tas(ptr) (xchg((ptr), 1))

/*
 * Deliberately never defined: using xchg() on an unsupported operand size
 * turns into a link-time error instead of silent miscompilation.
 */
extern void __xchg_called_with_bad_pointer(void);

/* Hardware memory barriers — "synco" is used for all of full/read/write. */
#define mb()	__asm__ __volatile__ ("synco": : :"memory")
#define rmb()	mb()
#define wmb()	__asm__ __volatile__ ("synco": : :"memory")
/* No ordering needed for data-dependent reads on this CPU. */
#define read_barrier_depends()	do { } while (0)
56
#ifdef CONFIG_SMP
/* SMP: smp_* barriers must order accesses against other CPUs — use the
 * real hardware barriers defined above. */
#define smp_mb()	mb()
#define smp_rmb()	rmb()
#define smp_wmb()	wmb()
#define smp_read_barrier_depends()	read_barrier_depends()
#else
/* UP: only the compiler can reorder, so a compiler barrier suffices. */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_read_barrier_depends()	do { } while (0)
#endif /* CONFIG_SMP */

/* Assign 'value' to 'var' with barrier semantics; the xchg()-based forms
 * rely on the exchange acting as a full barrier. */
#define set_rmb(var, value) do { xchg(&var, value); } while (0)
#define set_mb(var, value) set_rmb(var, value)
#define set_wmb(var, value) do { var = value; wmb(); } while (0)
72
73 /* Interrupt Control */
/*
 * Status-register bits toggled by the local_irq_* primitives below.
 * Default build masks SR[7:4] (value 0xf0 — presumably the interrupt
 * mask field); with HARD_CLI it is bit 28 instead (the comments below
 * call this SR.BL, the block bit).  _L is the 32-bit constant used in C
 * comparisons, _LL the 64-bit one fed to the inline asm.
 * NOTE(review): bit meanings taken from this file's own comments — confirm
 * against the SH-5 architecture manual.
 */
#ifndef HARD_CLI
#define SR_MASK_L 0x000000f0L
#define SR_MASK_LL 0x00000000000000f0LL
#else
#define SR_MASK_L 0x10000000L
#define SR_MASK_LL 0x0000000010000000LL
#endif
81
82 static __inline__ void local_irq_enable(void)
83 {
84         /* cli/sti based on SR.BL */
85         unsigned long long __dummy0, __dummy1=~SR_MASK_LL;
86
87         __asm__ __volatile__("getcon    " __SR ", %0\n\t"
88                              "and       %0, %1, %0\n\t"
89                              "putcon    %0, " __SR "\n\t"
90                              : "=&r" (__dummy0)
91                              : "r" (__dummy1));
92 }
93
94 static __inline__ void local_irq_disable(void)
95 {
96         /* cli/sti based on SR.BL */
97         unsigned long long __dummy0, __dummy1=SR_MASK_LL;
98         __asm__ __volatile__("getcon    " __SR ", %0\n\t"
99                              "or        %0, %1, %0\n\t"
100                              "putcon    %0, " __SR "\n\t"
101                              : "=&r" (__dummy0)
102                              : "r" (__dummy1));
103 }
104
/*
 * local_save_flags(x): x = SR masked down to the interrupt bits.
 * Result is non-zero iff interrupts are currently blocked (cf.
 * irqs_disabled() below).  Read-only — SR is not modified.
 */
#define local_save_flags(x)						\
(__extension__ ({	unsigned long long __dummy=SR_MASK_LL;		\
	__asm__ __volatile__(						\
		"getcon " __SR ", %0\n\t"				\
		"and	%0, %1, %0"					\
		: "=&r" (x)						\
		: "r" (__dummy));}))
112
/*
 * local_irq_save(x): save the interrupt bits of SR into x, then raise
 * SR_MASK_LL in SR to block interrupts.  "or %1, r63, %0" copies SR into
 * the save register before the mask is applied (NOTE(review): r63 reads
 * as zero here — confirm against the SH-5 ISA).
 *
 * Fixes vs. original:
 *  - dropped the stray ';' that trailed the statement expression; it made
 *    "if (c) local_irq_save(x); else ..." a syntax error;
 *  - added a "memory" clobber so the compiler cannot move memory accesses
 *    across the interrupt-disable point.
 */
#define local_irq_save(x)						\
(__extension__ ({	unsigned long long __d2=SR_MASK_LL, __d1;	\
	__asm__ __volatile__(						\
		"getcon " __SR ", %1\n\t"				\
		"or	%1, r63, %0\n\t"				\
		"or	%1, %2, %1\n\t"					\
		"putcon	%1, " __SR "\n\t"				\
		"and	%0, %2, %0"					\
		: "=&r" (x), "=&r" (__d1)				\
		: "r" (__d2)						\
		: "memory");}))
123
/*
 * local_irq_restore(x): restore the state captured by local_irq_save().
 * Only ever re-enables: if x shows interrupts were blocked at save time,
 * they are left blocked (nothing to do — we are still inside the outer
 * disabled section).
 */
#define local_irq_restore(x) do {					\
	if ( ((x) & SR_MASK_L) == 0 )		/* dropping to 0 ? */	\
		local_irq_enable();		/* yes...re-enable */	\
} while (0)
128
/* True iff the SR interrupt-mask bits are currently set (IRQs blocked). */
#define irqs_disabled()			\
({					\
	unsigned long flags;		\
	local_save_flags(flags);	\
	(flags != 0);			\
})
135
/*
 * Exchange the 32-bit value at *m with val, returning the old value.
 * Atomic only with respect to interrupts on the local CPU — the code
 * merely brackets a read and a write with local_irq_save/restore, so it
 * does not protect against other CPUs.
 *
 * Fix: 'extern __inline__' -> 'static __inline__'.  Under C99 inline
 * semantics 'extern inline' in a header emits an external definition in
 * every translation unit including it (duplicate symbols at link time);
 * 'static' also matches __xchg() below.
 */
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
	unsigned long flags, retval;

	local_irq_save(flags);
	retval = *m;
	*m = val;
	local_irq_restore(flags);
	return retval;
}
146
/*
 * Exchange the byte at *m with the low 8 bits of val, returning the old
 * byte.  Like xchg_u32(), this is only interrupt-atomic on the local CPU.
 *
 * Fix: 'extern __inline__' -> 'static __inline__' — avoids duplicate
 * external definitions under C99 inline semantics and matches __xchg().
 */
static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
{
	unsigned long flags, retval;

	local_irq_save(flags);
	retval = *m;
	*m = val & 0xff;	/* store only the low byte */
	local_irq_restore(flags);
	return retval;
}
157
/*
 * Size dispatcher behind the xchg() macro.  Only 4- and 1-byte operands
 * are supported; any other size calls the deliberately-undefined
 * __xchg_called_with_bad_pointer(), turning misuse into a link error.
 *
 * Cleanup: removed the unreachable 'break' statements after 'return' and
 * moved the error path into an explicit 'default' case.
 */
static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	case 1:
		return xchg_u8(ptr, x);
	default:
		/* Unsupported operand size: force a link-time failure. */
		__xchg_called_with_bad_pointer();
		return x;
	}
}
171
/* XXX
 * disable hlt during certain critical i/o operations
 */
#define HAVE_DISABLE_HLT
void disable_hlt(void);
void enable_hlt(void);

/*
 * Fix: this spot used to redefine smp_mb()/smp_rmb()/smp_wmb() to
 * barrier() unconditionally, silently overriding the CONFIG_SMP-aware
 * definitions earlier in this file and demoting the barriers to
 * compiler-only on SMP builds (with redefinition warnings to boot).
 * The duplicates have been removed; the #ifdef CONFIG_SMP block above
 * is authoritative.
 */
183
#ifdef CONFIG_SH_ALPHANUMERIC
/* This is only used for debugging. */
extern void print_seg(char *file,int line);
/* Dump segment state tagged with the call site (debug builds only). */
#define PLS() print_seg(__FILE__,__LINE__)
#else	/* CONFIG_SH_ALPHANUMERIC */
/* Compiles away entirely when the debug option is off. */
#define PLS()
#endif	/* CONFIG_SH_ALPHANUMERIC */

/* Trace marker: printk the file/function/line of the call site. */
#define PL() printk("@ <%s,%s:%d>\n",__FILE__,__FUNCTION__,__LINE__)
193
194 #endif /* __ASM_SH64_SYSTEM_H */