// SPDX-License-Identifier: GPL-2.0+
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#include <asm/types.h>
#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

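/*
 * BITMAP_FIRST_WORD_MASK(start) keeps the bits at and above @start
 * within its word; BITMAP_LAST_WORD_MASK(nbits) keeps the bits below
 * @nbits within the final word. For example, with BITS_PER_LONG == 32,
 * BITMAP_LAST_WORD_MASK(36) == 0x0000000f.
 */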
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
#define small_const_nbits(nbits) \
	(__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)

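/* OR the first @bits bits of @bitmap1 and @bitmap2 into @dst, word by word. */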
static inline void
__bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
	    const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}

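/*
 * Population count: sum hweight_long() over the whole words, then mask
 * the partial final word with BITMAP_LAST_WORD_MASK() before counting it.
 */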
static inline int
__bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	unsigned int k, lim = bits / BITS_PER_LONG;
	int w = 0;

	for (k = 0; k < lim; k++)
		w += hweight_long(bitmap[k]);

	if (bits % BITS_PER_LONG)
		w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));

	return w;
}

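/*
 * Set @len bits starting at bit @start: handle a partial first word via
 * BITMAP_FIRST_WORD_MASK(), fill whole words with ~0UL, then finish any
 * partial last word via BITMAP_LAST_WORD_MASK().
 */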
static inline void
__bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}

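/* Counterpart of __bitmap_set(): clear @len bits starting at bit @start. */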
static inline void
__bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}

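/*
 * Clear an @nbits-bit bitmap. A compile-time-constant size of at most
 * one word reduces to a single store; larger maps use memset(), which
 * is why <linux/string.h> is included above.
 */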
static inline void bitmap_zero(unsigned long *dst, int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = 0UL;
	} else {
		int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

		memset(dst, 0, len);
	}
}

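/*
 * Find the next set bit at or above @offset. Returns the bit number on
 * success, or @size if no set bit remains.
 */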
static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size,
	      unsigned long offset)
{
	const unsigned long *p = addr + BIT_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG - 1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		tmp = *(p++);
		tmp &= (~0UL << offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	while (size & ~(BITS_PER_LONG - 1)) {
		tmp = *(p++);
		if (tmp)
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp &= (~0UL >> (BITS_PER_LONG - size));
	if (tmp == 0UL)			/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + __ffs(tmp);
}

/*
 * Find the first set bit in a memory region. Returns the bit number of
 * the first set bit, or @size if no bit is set.
 */
static inline unsigned long find_first_bit(const unsigned long *addr, unsigned long size)
{
	unsigned long idx;

	for (idx = 0; idx * BITS_PER_LONG < size; idx++) {
		if (addr[idx])
			return min(idx * BITS_PER_LONG + __ffs(addr[idx]), size);
	}

	return size;
}

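/*
 * Iterate over every set bit of a bitmap. A minimal usage sketch ("map"
 * and the 64-bit size are illustrative, not part of this header):
 *
 *	unsigned long bit;
 *
 *	for_each_set_bit(bit, map, 64)
 *		printf("bit %lu is set\n", bit);
 */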
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

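/*
 * Set every bit of an @nbits-bit bitmap. In both branches, trailing
 * bits beyond @nbits in the final word are set as well.
 */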
static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = ~0UL;
	} else {
		unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

		memset(dst, 0xff, len);
	}
}

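/* Single-word fast path for small constant @nbits, else __bitmap_or(). */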
static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
			     const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 | *src2;
	else
		__bitmap_or(dst, src1, src2, nbits);
}

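/* Return the number of set bits in the first @nbits bits of @src. */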
static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight(src, nbits);
}

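/*
 * Three cases: a constant single bit uses __set_bit(); a region whose
 * start and length are compile-time multiples of BITMAP_MEM_ALIGNMENT
 * (a byte on little-endian, a full word on big-endian, where byte order
 * and bit order diverge) is handled with memset(); anything else falls
 * back to __bitmap_set().
 */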
static inline void bitmap_set(unsigned long *map, unsigned int start,
			      unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__set_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0xff, nbits / 8);
	else
		__bitmap_set(map, start, nbits);
}

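/* Mirror of bitmap_set(): clears the region rather than setting it. */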
static inline void bitmap_clear(unsigned long *map, unsigned int start,
				unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__clear_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0, nbits / 8);
	else
		__bitmap_clear(map, start, nbits);
}

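/*
 * Putting the API together, a caller might manage a pool of 128 slots
 * like this (a hypothetical sketch; "slots" and the sizes are purely
 * illustrative):
 *
 *	unsigned long slots[BITS_TO_LONGS(128)];
 *	unsigned long i;
 *
 *	bitmap_zero(slots, 128);
 *	bitmap_set(slots, 0, 8);		// mark slots 0..7 in use
 *	i = find_first_bit(slots, 128);		// -> 0
 *	i = find_next_bit(slots, 128, 8);	// -> 128: none above bit 7
 */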
#endif /* __LINUX_BITMAP_H */