kernel/arch/s390/lib/find.c
/*
 * MSB0 numbered special bitops handling.
 *
 * On s390x the bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 * and on s390:
 *   |0.....31|32....63|64....95|96...127|128..159|160..191|192..223|224..255|
 *
 * The reason for this bit numbering is the fact that the hardware sets bits
 * in a bitmap starting at bit 0 (MSB) and we don't want to scan the bitmap
 * from the 'wrong end'.
 */
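/*
 * Worked example (editorial note, not part of the original file): on 64-bit
 * s390x a word that contains only the value 0x8000000000000000UL has
 * inverted bit 0 set.  __fls() reports that bit as 63 in conventional LSB0
 * numbering, and the conversion used below,
 * __fls(word) ^ (BITS_PER_LONG - 1), yields 63 ^ 63 == 0, i.e. the MSB0
 * bit number the hardware uses.
 */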

#include <linux/compiler.h>
#include <linux/bitops.h>
#include <linux/export.h>

unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size)
{
	const unsigned long *p = addr;
	unsigned long result = 0;
	unsigned long tmp;

	while (size & ~(BITS_PER_LONG - 1)) {
		if ((tmp = *(p++)))
			goto found;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = (*p) & (~0UL << (BITS_PER_LONG - size));
	if (!tmp)		/* Are any bits set? */
		return result + size;	/* Nope. */
found:
	return result + (__fls(tmp) ^ (BITS_PER_LONG - 1));
}
EXPORT_SYMBOL(find_first_bit_inv);
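
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * as with the generic find_first_bit(), a return value equal to @size means
 * no bit is set, so callers check the result before using it.  The
 * identifiers below (example_first_pending, bitmap, nr_bits, bit) are
 * hypothetical.
 */
static inline int example_first_pending(const unsigned long *bitmap,
					unsigned long nr_bits,
					unsigned long *bit)
{
	*bit = find_first_bit_inv(bitmap, nr_bits);
	return *bit < nr_bits;	/* non-zero if some bit is set */
}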

unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset)
{
	const unsigned long *p = addr + (offset / BITS_PER_LONG);
	unsigned long result = offset & ~(BITS_PER_LONG - 1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		tmp = *(p++);
		tmp &= (~0UL >> offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	while (size & ~(BITS_PER_LONG - 1)) {
		if ((tmp = *(p++)))
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = *p;
found_first:
	tmp &= (~0UL << (BITS_PER_LONG - size));
	if (!tmp)		/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + (__fls(tmp) ^ (BITS_PER_LONG - 1));
}
EXPORT_SYMBOL(find_next_bit_inv);
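
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * walking every set bit of an MSB0 bitmap mirrors the familiar
 * for_each_set_bit() pattern, with the _inv variants doing the scanning.
 * The identifiers below (example_scan_inv, bitmap, nr_bits) are
 * hypothetical.
 */
static inline unsigned long example_scan_inv(const unsigned long *bitmap,
					     unsigned long nr_bits)
{
	unsigned long bit, count = 0;

	for (bit = find_first_bit_inv(bitmap, nr_bits);
	     bit < nr_bits;
	     bit = find_next_bit_inv(bitmap, nr_bits, bit + 1))
		count++;	/* count the bits that are set */

	return count;
}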