 include/linux/cpumask.h | 112 +-
 1 file changed, 1 insertion(+), 111 deletions(-)
diff --git a/include/linux/cpumask.h b/include/linux/cpumask.h
index dbb8367ecf56..e162d13c65ab 100644
--- a/include/linux/cpumask.h
+++ b/include/linux/cpumask.h
@@ -15,10 +15,6 @@
  * see bitmap_scnprintf() and bitmap_parse_user() in lib/bitmap.c.
  * For details of cpulist_scnprintf() and cpulist_parse(), see
  * bitmap_scnlistprintf() and bitmap_parselist(), also in bitmap.c.
- * For details of cpu_remap(), see bitmap_bitremap in lib/bitmap.c
- * For details of cpus_remap(), see bitmap_remap in lib/bitmap.c.
- * For details of cpus_onto(), see bitmap_onto in lib/bitmap.c.
- * For details of cpus_fold(), see bitmap_fold in lib/bitmap.c.
  *
  * . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
  * Note: The alternate operations with the suffix "_nr" are used
@@ -47,22 +43,17 @@
  * void cpus_or(dst, src1, src2) dst = src1 | src2 [union]
  * void cpus_xor(dst, src1, src2) dst = src1 ^ src2
  * int cpus_andnot(dst, src1, src2) dst = src1 & ~src2
- * void cpus_complement(dst, src) dst = ~src
  *
  * int cpus_equal(mask1, mask2) Does mask1 == mask2?
  * int cpus_intersects(mask1, mask2) Do mask1 and mask2 intersect?
  * int cpus_subset(mask1, mask2) Is mask1 a subset of mask2?
  * int cpus_empty(mask) Is mask empty (no bits sets)?
- * int cpus_full(mask) Is mask full (all bits sets)?
  * int cpus_weight(mask) Hamming weigh - number of set bits
- * int cpus_weight_nr(mask) Same using nr_cpu_ids instead of NR_CPUS
  *
- * void cpus_shift_right(dst, src, n) Shift right
  * void cpus_shift_left(dst, src, n) Shift left
  *
  * int first_cpu(mask) Number lowest set bit, or NR_CPUS
  * int next_cpu(cpu, mask) Next cpu past 'cpu', or NR_CPUS
- * int next_cpu_nr(cpu, mask) Next cpu past 'cpu', or nr_cpu_ids
  *
  * cpumask_t cpumask_of_cpu(cpu) Return cpumask with bit 'cpu' set
  * (can be used as an lvalue)
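The entries dropped from this list are the value-style operations whose pointer-based counterparts (cpumask_complement(), cpumask_full(), cpumask_weight(), cpumask_shift_right()) are defined elsewhere in this header and are not visible in this hunk. A minimal, illustrative sketch of a converted caller, assuming those helpers:

/* Illustrative sketch: pointer-based replacements for the removed
 * value-style helpers; the old calls are shown in the comments.
 * Assumes cpumask_complement()/cpumask_full()/cpumask_weight()/
 * cpumask_shift_right() from the struct cpumask API in this header.
 */
#include <linux/cpumask.h>
#include <linux/kernel.h>

static void convert_example(struct cpumask *dst, const struct cpumask *src)
{
	cpumask_complement(dst, src);		/* was: cpus_complement(*dst, *src) */

	if (cpumask_full(dst))			/* was: cpus_full(*dst) */
		pr_info("%u bits set\n",
			cpumask_weight(dst));	/* was: cpus_weight_nr(*dst) */

	cpumask_shift_right(dst, src, 1);	/* was: cpus_shift_right(*dst, *src, 1) */
}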
@@ -70,45 +61,10 @@
  * CPU_MASK_NONE Initializer - no bits set
  * unsigned long *cpus_addr(mask) Array of unsigned long's in mask
  *
- * CPUMASK_ALLOC kmalloc's a structure that is a composite of many cpumask_t
- * variables, and CPUMASK_PTR provides pointers to each field.
- *
- * The structure should be defined something like this:
- * struct my_cpumasks {
- *	cpumask_t mask1;
- *	cpumask_t mask2;
- * };
- *
- * Usage is then:
- *	CPUMASK_ALLOC(my_cpumasks);
- *	CPUMASK_PTR(mask1, my_cpumasks);
- *	CPUMASK_PTR(mask2, my_cpumasks);
- *
- * --- DO NOT reference cpumask_t pointers until this check ---
- *	if (my_cpumasks == NULL)
- *		"kmalloc failed"...
- *
- * References are now pointers to the cpumask_t variables (*mask1, ...)
- *
- *if NR_CPUS > BITS_PER_LONG
- *   CPUMASK_ALLOC(m) Declares and allocates struct m *m =
- *		kmalloc(sizeof(*m), GFP_KERNEL)
- *   CPUMASK_FREE(m) Macro for kfree(m)
- *else
- *   CPUMASK_ALLOC(m) Declares struct m _m, *m = &_m
- *   CPUMASK_FREE(m) Nop
- *endif
- *   CPUMASK_PTR(v, m) Declares cpumask_t *v = &(m->v)
- * ------------------------------------------------------------------------
- *
  * int cpumask_scnprintf(buf, len, mask) Format cpumask for printing
  * int cpumask_parse_user(ubuf, ulen, mask) Parse ascii string as cpumask
  * int cpulist_scnprintf(buf, len, mask) Format cpumask as list for printing
  * int cpulist_parse(buf, map) Parse ascii string as cpulist
- * int cpu_remap(oldbit, old, new) newbit = map(old, new)(oldbit)
- * void cpus_remap(dst, src, old, new) *dst = map(old, new)(src)
- * void cpus_onto(dst, orig, relmap) *dst = orig relative to relmap
- * void cpus_fold(dst, orig, sz) dst bits = orig bits mod sz
  *
  * for_each_cpu_mask(cpu, mask) for-loop cpu over mask using NR_CPUS
  * for_each_cpu_mask_nr(cpu, mask) for-loop cpu over mask using nr_cpu_ids
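The CPUMASK_ALLOC()/CPUMASK_PTR()/CPUMASK_FREE() pattern documented in the removed block is superseded by cpumask_var_t with alloc_cpumask_var()/free_cpumask_var(), which are defined elsewhere in this header and not shown in this diff. A minimal sketch of the replacement pattern, with illustrative names:

/* Sketch, assuming the cpumask_var_t helpers that replace CPUMASK_ALLOC:
 * the mask is heap-allocated when configured off-stack and lives on the
 * stack otherwise, with free_cpumask_var() a no-op in the latter case --
 * analogous to the old NR_CPUS > BITS_PER_LONG split described above.
 */
#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>

static int tmp_mask_example(void)
{
	cpumask_var_t tmp;

	if (!alloc_cpumask_var(&tmp, GFP_KERNEL))	/* like the old kmalloc() + NULL check */
		return -ENOMEM;

	cpumask_copy(tmp, cpu_online_mask);		/* operate on it via the pointer-style API */

	free_cpumask_var(tmp);				/* like CPUMASK_FREE(m) */
	return 0;
}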
@@ -142,7 +98,6 @@
 #include <linux/bitmap.h>
 
 typedef struct cpumask { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t;
-extern cpumask_t _unused_cpumask_arg_;
 
 #ifndef CONFIG_DISABLE_OBSOLETE_CPUMASK_FUNCTIONS
 #define cpu_set(cpu, dst) __cpu_set((cpu), &(dst))
@@ -207,13 +162,6 @@ static inline int __cpus_andnot(cpumask_t *dstp, const cpumask_t *src1p,
 	return bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits);
 }
 
-#define cpus_complement(dst, src) __cpus_complement(&(dst), &(src), NR_CPUS)
-static inline void __cpus_complement(cpumask_t *dstp,
-					const cpumask_t *srcp, int nbits)
-{
-	bitmap_complement(dstp->bits, srcp->bits, nbits);
-}
-
 #define cpus_equal(src1, src2) __cpus_equal(&(src1), &(src2), NR_CPUS)
 static inline int __cpus_equal(const cpumask_t *src1p,
 					const cpumask_t *src2p, int nbits)
@@ -241,26 +189,12 @@ static inline int __cpus_empty(const cpumask_t *srcp, int nbits)
 	return bitmap_empty(srcp->bits, nbits);
 }
 
-#define cpus_full(cpumask) __cpus_full(&(cpumask), NR_CPUS)
-static inline int __cpus_full(const cpumask_t *srcp, int nbits)
-{
-	return bitmap_full(srcp->bits, nbits);
-}
-
 #define cpus_weight(cpumask) __cpus_weight(&(cpumask), NR_CPUS)
 static inline int __cpus_weight(const cpumask_t *srcp, int nbits)
 {
 	return bitmap_weight(srcp->bits, nbits);
 }
 
-#define cpus_shift_right(dst, src, n) \
-			__cpus_shift_right(&(dst), &(src), (n), NR_CPUS)
-static inline void __cpus_shift_right(cpumask_t *dstp,
-					const cpumask_t *srcp, int n, int nbits)
-{
-	bitmap_shift_right(dstp->bits, srcp->bits, n, nbits);
-}
-
 #define cpus_shift_left(dst, src, n) \
 			__cpus_shift_left(&(dst), &(src), (n), NR_CPUS)
 static inline void __cpus_shift_left(cpumask_t *dstp,
@@ -346,46 +280,6 @@ static inline const struct cpumask *get_cpu_mask(unsigned int cpu)
 
 #define cpus_addr(src) ((src).bits)
 
-#if NR_CPUS > BITS_PER_LONG
-#define CPUMASK_ALLOC(m) struct m *m = kmalloc(sizeof(*m), GFP_KERNEL)
-#define CPUMASK_FREE(m) kfree(m)
-#else
-#define CPUMASK_ALLOC(m) struct m _m, *m = &_m
-#define CPUMASK_FREE(m)
-#endif
-#define CPUMASK_PTR(v, m) cpumask_t *v = &(m->v)
-
-#define cpu_remap(oldbit, old, new) \
-		__cpu_remap((oldbit), &(old), &(new), NR_CPUS)
-static inline int __cpu_remap(int oldbit,
-		const cpumask_t *oldp, const cpumask_t *newp, int nbits)
-{
-	return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits);
-}
-
-#define cpus_remap(dst, src, old, new) \
-		__cpus_remap(&(dst), &(src), &(old), &(new), NR_CPUS)
-static inline void __cpus_remap(cpumask_t *dstp, const cpumask_t *srcp,
-		const cpumask_t *oldp, const cpumask_t *newp, int nbits)
-{
-	bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits);
-}
-
-#define cpus_onto(dst, orig, relmap) \
-		__cpus_onto(&(dst), &(orig), &(relmap), NR_CPUS)
-static inline void __cpus_onto(cpumask_t *dstp, const cpumask_t *origp,
-		const cpumask_t *relmapp, int nbits)
-{
-	bitmap_onto(dstp->bits, origp->bits, relmapp->bits, nbits);
-}
-
-#define cpus_fold(dst, orig, sz) \
-		__cpus_fold(&(dst), &(orig), sz, NR_CPUS)
-static inline void __cpus_fold(cpumask_t *dstp, const cpumask_t *origp,
-		int sz, int nbits)
-{
-	bitmap_fold(dstp->bits, origp->bits, sz, nbits);
-}
 #endif /* !CONFIG_DISABLE_OBSOLETE_CPUMASK_FUNCTIONS */
 
 #if NR_CPUS == 1
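cpu_remap(), cpus_remap(), cpus_onto() and cpus_fold() are removed without pointer-based cpumask_* replacements; as the deleted wrappers show, they were thin veneers over bitmap_bitremap(), bitmap_remap(), bitmap_onto() and bitmap_fold() in lib/bitmap.c, so a remaining caller can invoke those routines on the mask's embedded bitmap directly. An illustrative sketch (function and parameter names are made up for the example):

/* Illustrative only: open-coding the removed remap/onto/fold wrappers by
 * calling the lib/bitmap.c routines on the cpumask's bits[] array, with
 * the same NR_CPUS length the removed wrappers passed.
 */
#include <linux/bitmap.h>
#include <linux/cpumask.h>

static int remap_example(cpumask_t *dst, const cpumask_t *src,
			 const cpumask_t *old, const cpumask_t *new)
{
	int newbit;

	/* was: newbit = cpu_remap(oldbit, *old, *new) */
	newbit = bitmap_bitremap(first_cpu(*old), old->bits, new->bits, NR_CPUS);

	/* was: cpus_remap(*dst, *src, *old, *new) */
	bitmap_remap(dst->bits, src->bits, old->bits, new->bits, NR_CPUS);

	/* was: cpus_onto(*dst, *src, *old) -- 'old' reused as the relmap here */
	bitmap_onto(dst->bits, src->bits, old->bits, NR_CPUS);

	/* was: cpus_fold(*dst, *src, 4) */
	bitmap_fold(dst->bits, src->bits, 4, NR_CPUS);

	return newbit;
}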
@@ -419,18 +313,14 @@ int __any_online_cpu(const cpumask_t *mask);
 #ifndef CONFIG_DISABLE_OBSOLETE_CPUMASK_FUNCTIONS
 #if NR_CPUS <= 64
 
-#define next_cpu_nr(n, src) next_cpu(n, src)
-#define cpus_weight_nr(cpumask) cpus_weight(cpumask)
 #define for_each_cpu_mask_nr(cpu, mask) for_each_cpu_mask(cpu, mask)
 
 #else /* NR_CPUS > 64 */
 
 int __next_cpu_nr(int n, const cpumask_t *srcp);
-#define next_cpu_nr(n, src) __next_cpu_nr((n), &(src))
-#define cpus_weight_nr(cpumask) __cpus_weight(&(cpumask), nr_cpu_ids)
 #define for_each_cpu_mask_nr(cpu, mask) \
 	for ((cpu) = -1; \
-		(cpu) = next_cpu_nr((cpu), (mask)), \
+		(cpu) = __next_cpu_nr((cpu), &(mask)), \
 		(cpu) < nr_cpu_ids; )
 
 #endif /* NR_CPUS > 64 */
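With next_cpu_nr() and cpus_weight_nr() gone and for_each_cpu_mask_nr() now calling __next_cpu_nr() directly, callers are expected to move to the nr_cpu_ids-bounded, pointer-based iterators (for_each_cpu(), cpumask_next(), cpumask_weight()), which are defined elsewhere in this header and not shown in this hunk. A rough sketch of a converted loop, assuming those helpers:

/* Sketch: nr_cpu_ids-bounded iteration without the removed _nr helpers.
 * for_each_cpu(), cpumask_next() and cpumask_weight() are assumed from
 * the pointer-based API; the old calls appear in the comments.
 */
#include <linux/cpumask.h>

static unsigned int count_set_cpus(const struct cpumask *mask)
{
	unsigned int first, seen = 0;
	int cpu;

	/* first set bit, or >= nr_cpu_ids when empty; was: next_cpu_nr(-1, *mask) */
	first = cpumask_next(-1, mask);
	if (first >= nr_cpu_ids)
		return 0;

	for_each_cpu(cpu, mask)		/* was: for_each_cpu_mask_nr(cpu, *mask) */
		seen++;

	/* equals cpumask_weight(mask); was: cpus_weight_nr(*mask) */
	return seen;
}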
