/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/bug.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL            "ll     "
#define __SC            "sc     "
#define __INS           "ins    "
#define __EXT           "ext    "
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL            "lld    "
#define __SC            "scd    "
#define __INS           "dins   "
#define __EXT           "dext   "
#endif

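/*
 * Worked example (illustrative only): every operation below splits the
 * bit number @nr into a word index and an in-word offset using the
 * SZLONG_* constants:
 *
 *      unsigned long *word = (unsigned long *) addr + (nr >> SZLONG_LOG);
 *      unsigned long  off  = nr & SZLONG_MASK;
 *
 * On a 32-bit kernel nr == 37 therefore addresses bit 5 of addr[1]
 * (37 >> 5 == 1, 37 & 31 == 5).
 */
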
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()      smp_mb()
#define smp_mb__after_clear_bit()       smp_mb()

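/*
 * Usage sketch (illustration only; 'shared', 'LOCK_BIT' and 'result'
 * are made-up caller names): when clear_bit() releases a lock-like
 * flag, order the preceding stores explicitly:
 *
 *      shared->data = result;
 *      smp_mb__before_clear_bit();
 *      clear_bit(LOCK_BIT, &shared->flags);
 */
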
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        unsigned short bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(bit)) {
                __asm__ __volatile__(
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       " __INS "%0, %4, %2, 1                          \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (bit), "m" (*m), "r" (~0));
#endif /* CONFIG_CPU_MIPSR2 */
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << bit), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                *a |= mask;
                raw_local_irq_restore(flags);
        }
}

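/*
 * C-level sketch of the ll/sc retry loop used above (illustration only;
 * the real implementation is the inline assembly).  'store_conditional'
 * is a made-up stand-in for the sc instruction, which fails if *m was
 * modified since the matching ll:
 *
 *      unsigned long old, new;
 *
 *      do {
 *              old = *m;                               // ll
 *              new = old | (1UL << bit);               // or
 *      } while (!store_conditional(m, new));           // sc, retried on failure
 */
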
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        unsigned short bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (~(1UL << bit)), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(bit)) {
                __asm__ __volatile__(
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       " __INS "%0, $0, %2, 1                          \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (bit), "m" (*m));
#endif /* CONFIG_CPU_MIPSR2 */
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (~(1UL << bit)), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                *a &= ~mask;
                raw_local_irq_restore(flags);
        }
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned short bit = nr & SZLONG_MASK;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << bit), "m" (*m));
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << bit), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                *a ^= mask;
                raw_local_irq_restore(flags);
        }
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned short bit = nr & SZLONG_MASK;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                int retval;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                retval = (mask & *a) != 0;
                *a |= mask;
                raw_local_irq_restore(flags);

                smp_mb();

                return retval;
        }
}

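/*
 * Usage sketch (illustration only; 'BUSY_BIT' and 'state' are made-up
 * caller names): test_and_set_bit() gives a simple try-lock, with
 * clear_bit() plus the barrier helper as the matching release:
 *
 *      if (test_and_set_bit(BUSY_BIT, &state))
 *              return -EBUSY;                  // already owned
 *      ... critical section ...
 *      smp_mb__before_clear_bit();
 *      clear_bit(BUSY_BIT, &state);
 */
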
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned short bit = nr & SZLONG_MASK;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       " __EXT "%2, %0, %3, 1                          \n"
                "       " __INS "%0, $0, %3, 1                          \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "ri" (bit), "m" (*m)
                : "memory");

                smp_mb();

                return res;
#endif
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                int retval;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                retval = (mask & *a) != 0;
                *a &= ~mask;
                raw_local_irq_restore(flags);

                smp_mb();

                return retval;
        }
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned short bit = nr & SZLONG_MASK;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp, res;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "\t%2, %1                               \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << bit), "m" (*m)
                : "memory");

                smp_mb();

                return res != 0;
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask, retval;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << bit;
                raw_local_irq_save(flags);
                retval = (mask & *a) != 0;
                *a ^= mask;
                raw_local_irq_restore(flags);

                smp_mb();

                return retval;
        }
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * Return the bit position (0..SZLONG-1) of the most significant 1 bit
 * in a word.
 * Returns -1 if no 1 bit exists.
 */
static inline int __ilog2(unsigned long x)
{
        int lz;

        if (sizeof(x) == 4) {
                __asm__ (
                "       .set    push                                    \n"
                "       .set    mips32                                  \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (lz)
                : "r" (x));

                return 31 - lz;
        }

        BUG_ON(sizeof(x) != 8);

        __asm__ (
        "       .set    push                                            \n"
        "       .set    mips64                                          \n"
        "       dclz    %0, %1                                          \n"
        "       .set    pop                                             \n"
        : "=r" (lz)
        : "r" (x));

        return 63 - lz;
}

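/*
 * Worked examples for __ilog2() (illustrative; the values follow from
 * the clz/dclz definition above): __ilog2(1) == 0, __ilog2(0x80) == 7,
 * and __ilog2(0) == -1 because clz/dclz of zero returns the full word
 * width.
 */
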
#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        return __ilog2(word & -word);
}

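/*
 * Sketch of the idiom above (illustrative): word & -word isolates the
 * lowest set bit, so for word == 0x14 (binary 10100) the expression
 * yields 0x4 and __ffs(0x14) == __ilog2(0x4) == 2.
 */
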
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
        __asm__ ("clz %0, %1" : "=r" (word) : "r" (word));

        return 32 - word;
}

#if defined(CONFIG_64BIT) && defined(CONFIG_CPU_MIPS64)
static inline int fls64(__u64 word)
{
        __asm__ ("dclz %0, %1" : "=r" (word) : "r" (word));

        return 64 - word;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz() (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}

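/*
 * Quick examples (illustrative): ffs(0) == 0 and ffs(0x8) == 4; for
 * any nonzero x, ffs(x) == __ffs(x) + 1, since ffs() is 1-based like
 * the libc routine while __ffs() is 0-based.
 */
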
#else

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */