/* datapath/linux-2.4/compat-2.4/include-mips/asm/system.h (sliver-openvswitch) */
#ifndef __ASM_MIPS_SYSTEM_H_WRAPPER
#define __ASM_MIPS_SYSTEM_H_WRAPPER 1

#include_next <asm/system.h>

#error "Cribbed from linux-2.6/include/asm-mips/system.h but untested."

#define __HAVE_ARCH_CMPXCHG 1

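/*
 * Overview (comment added in this compat copy, not present upstream):
 * each helper below picks one of three strategies at run time:
 *
 *   1. ll/sc with a branch-likely retry, when the R10000_LLSC_WAR
 *      erratum workaround is needed;
 *   2. a plain ll/sc retry loop, on any other CPU with cpu_has_llsc;
 *   3. a load/compare/store with interrupts disabled, on CPUs
 *      without ll/sc.
 */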
static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
        unsigned long new)
{
        __u32 retval;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                /* Branch-likely (beqzl) back to the ll is the R10000
                 * ll/sc erratum workaround. */
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                /* .subsection 2 moves the retry branch out of line, so
                 * the common (successful) path falls straight through. */
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      $1, %1                                  \n"
                "       beqz    $1, 3f                                  \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "3:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else {
                /* No ll/sc on this CPU: emulate the atomic update by
                 * masking interrupts. */
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                if (retval == old)
                        *m = new;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();  /* full barrier after ll/sc on SMP */

        return retval;
}
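/*
 * The _local variants below perform the same compare-and-swap but omit
 * the trailing smp_llsc_mb() barrier, so they are only suitable for
 * data that is never accessed by another CPU (comment added in this
 * compat copy).
 */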
static inline unsigned long __cmpxchg_u32_local(volatile int * m,
        unsigned long old, unsigned long new)
{
        __u32 retval;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      $1, %1                                  \n"
                "       beqz    $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                retval = *m;
                if (retval == old)
                        *m = new;
                local_irq_restore(flags);       /* implies memory barrier  */
        }

        return retval;
}
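/*
 * On 64-bit kernels the same three strategies are repeated for 8-byte
 * values, using lld/scd in place of ll/sc (comment added in this
 * compat copy).
 */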
#ifdef CONFIG_64BIT
static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
        unsigned long new)
{
        __u64 retval;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
                "       scd     $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
                "       scd     $1, %1                                  \n"
                "       beqz    $1, 3f                                  \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "3:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                if (retval == old)
                        *m = new;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}
static inline unsigned long __cmpxchg_u64_local(volatile int * m,
        unsigned long old, unsigned long new)
{
        __u64 retval;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
                "       scd     $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
                "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
                "       scd     $1, %1                                  \n"
                "       beqz    $1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (retval), "=R" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                retval = *m;
                if (retval == old)
                        *m = new;
                local_irq_restore(flags);       /* implies memory barrier  */
        }

        return retval;
}
#else
/*
 * cmpxchg() on a 64-bit quantity cannot be implemented on a 32-bit
 * kernel, so divert callers to externs that are never defined: the
 * mistake shows up as a link-time error rather than silent breakage.
 */
extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
        volatile int * m, unsigned long old, unsigned long new);
#define __cmpxchg_u64 __cmpxchg_u64_unsupported_on_32bit_kernels
extern unsigned long __cmpxchg_u64_local_unsupported_on_32bit_kernels(
        volatile int * m, unsigned long old, unsigned long new);
#define __cmpxchg_u64_local __cmpxchg_u64_local_unsupported_on_32bit_kernels
#endif
/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old,
        unsigned long new, int size)
{
        /* size is a compile-time constant, so the switch collapses to a
         * direct call to the right helper. */
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
        case 8:
                return __cmpxchg_u64(ptr, old, new);
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}
static inline unsigned long __cmpxchg_local(volatile void * ptr,
        unsigned long old, unsigned long new, int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_local(ptr, old, new);
        case 8:
                return __cmpxchg_u64_local(ptr, old, new);
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr,old,new) \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), \
                (unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))

#define cmpxchg_local(ptr,old,new) \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
                (unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
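/*
 * Usage sketch (illustrative, not part of the original header): a
 * lock-free increment of a hypothetical int "counter" shared between
 * CPUs. cmpxchg() returns the value it found, so the loop retries
 * until the swap actually happened:
 *
 *      int old, new;
 *      do {
 *              old = counter;
 *              new = old + 1;
 *      } while (cmpxchg(&counter, old, new) != old);
 *
 * cmpxchg_local() has the same interface but, lacking the SMP barrier,
 * is only safe for data no other CPU touches (e.g. per-CPU counters).
 */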

#endif /* __ASM_MIPS_SYSTEM_H_WRAPPER */