/* arch/mips/include/asm/atomic.h */
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ACCESS_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)                ((v)->counter = (i))
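
/*
 * Example (illustrative sketch, not part of this header; the variable and
 * helper names below are hypothetical):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);		// plain store, no barriers implied
 *	if (atomic_read(&nr_users) > 0)		// unsynchronized snapshot
 *		do_something();			// hypothetical helper
 */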

#define ATOMIC_OP(op, c_op, asm_op)                                           \
static __inline__ void atomic_##op(int i, atomic_t * v)                       \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %0, %1          # atomic_" #op "        \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %0, %1          # atomic_" #op "\n"   \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       sc      %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
static __inline__ int atomic_##op##_return(int i, atomic_t * v)               \
{                                                                             \
        int result;                                                           \
                                                                              \
        smp_mb__before_llsc();                                                \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_" #op "_return \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        smp_llsc_mb();                                                        \
                                                                              \
        return result;                                                        \
}

#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

ATOMIC_OP(and, &=, and)
ATOMIC_OP(or, |=, or)
ATOMIC_OP(xor, ^=, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
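
/*
 * Expansion sketch (illustrative only, not part of this header):
 * ATOMIC_OPS(add, +=, addu) above generates atomic_add() and
 * atomic_add_return().  On an LL/SC kernel the generated
 * atomic_add_return() behaves like the hypothetical C loop below,
 * except that the real retry loop is the ll/sc sequence in assembly:
 *
 *	static inline int atomic_add_return_sketch(int i, atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);		// stands in for ll
 *			new = old + i;			// the addu step
 *		} while (atomic_cmpxchg(v, old, new) != old); // stands in for sc
 *
 *		return new;
 *	}
 */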

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
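
/*
 * Example (hypothetical resource pool, illustrative only): claim @n units
 * with atomic_sub_if_positive(), which only stores the new value when the
 * result stays non-negative.
 *
 *	static atomic_t free_slots = ATOMIC_INIT(8);
 *
 *	static inline int claim_slots(int n)	// hypothetical helper
 *	{
 *		// >= 0 means the subtraction happened and n slots are ours.
 *		return atomic_sub_if_positive(n, &free_slots) >= 0;
 *	}
 */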

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
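
/*
 * Example (illustrative only; obj_get_unless_zero is a hypothetical name):
 * the classic "take a reference unless the object is already dead" pattern
 * built on __atomic_add_unless().
 *
 *	static inline int obj_get_unless_zero(atomic_t *refcount)
 *	{
 *		// An old value of 0 means the object is being freed.
 *		return __atomic_add_unless(refcount, 1, 0) != 0;
 *	}
 */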

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
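
/*
 * Example (illustrative only; obj, obj_put and obj_free are hypothetical):
 * the usual reference-drop idiom built on atomic_dec_and_test(); exactly
 * one caller observes the counter reach zero and frees the object.
 *
 *	static inline void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcount))
 *			obj_free(o);
 *	}
 */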

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)                                         \
static __inline__ void atomic64_##op(long i, atomic64_t * v)                  \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %0, %1          # atomic64_" #op "\n" \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       scd     %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)         \
{                                                                             \
        long result;                                                          \
                                                                              \
        smp_mb__before_llsc();                                                \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_" #op "_return\n"  \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        smp_llsc_mb();                                                        \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

ATOMIC64_OP(and, &=, and)
ATOMIC64_OP(or, |=, or)
ATOMIC64_OP(xor, ^=, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */