From d05be13bcc6ec615fb2e9556a9b85d52800669b6 Mon Sep 17 00:00:00 2001
From: Jiri Slaby
Date: Thu, 18 Oct 2007 23:40:31 -0700
Subject: [PATCH] define first set of BIT* macros

- move BITOP_MASK and BITOP_WORD from asm-generic/bitops/atomic.h to
  include/linux/bitops.h and rename them to BIT_MASK and BIT_WORD
- move BITS_TO_LONGS and BITS_PER_BYTE to bitops.h too, and make it easy
  to define further BITS_TO_something macros (e.g. in event.c) via the
  new BITS_TO_TYPE macro

The remaining (and common) BIT macro will be defined once all
occurrences and conflicts have been sorted out in the following patches.

Signed-off-by: Jiri Slaby
Signed-off-by: Andrew Morton
Signed-off-by: Linus Torvalds
---
 include/asm-generic/bitops/atomic.h     | 27 ++++++++++-------------
 include/asm-generic/bitops/non-atomic.h | 29 +++++++++++--------------
 include/linux/bitops.h                  |  8 +++++++
 include/linux/types.h                   |  3 ---
 4 files changed, 33 insertions(+), 34 deletions(-)

diff --git a/include/asm-generic/bitops/atomic.h b/include/asm-generic/bitops/atomic.h
index cd8a9641bd66..4657f3e410fc 100644
--- a/include/asm-generic/bitops/atomic.h
+++ b/include/asm-generic/bitops/atomic.h
@@ -3,9 +3,6 @@
 
 #include <asm/types.h>
 
-#define BITOP_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
-#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
-
 #ifdef CONFIG_SMP
 #include <asm/spinlock.h>
 #include <asm/cache.h>	/* we use L1_CACHE_BYTES */
@@ -66,8 +63,8 @@ extern raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
  */
 static inline void set_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long flags;
 
 	_atomic_spin_lock_irqsave(p, flags);
@@ -87,8 +84,8 @@ static inline void set_bit(int nr, volatile unsigned long *addr)
  */
 static inline void clear_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long flags;
 
 	_atomic_spin_lock_irqsave(p, flags);
@@ -108,8 +105,8 @@ static inline void clear_bit(int nr, volatile unsigned long *addr)
  */
 static inline void change_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long flags;
 
 	_atomic_spin_lock_irqsave(p, flags);
@@ -128,8 +125,8 @@ static inline void change_bit(int nr, volatile unsigned long *addr)
  */
 static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old;
 	unsigned long flags;
 
@@ -152,8 +149,8 @@ static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
  */
 static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old;
 	unsigned long flags;
 
@@ -175,8 +172,8 @@ static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
  */
 static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old;
 	unsigned long flags;
 
diff --git a/include/asm-generic/bitops/non-atomic.h b/include/asm-generic/bitops/non-atomic.h
index 46a825cf2ae1..697cc2b7e0f0 100644
--- a/include/asm-generic/bitops/non-atomic.h
+++ b/include/asm-generic/bitops/non-atomic.h
@@ -3,9 +3,6 @@
 
 #include <asm/types.h>
 
-#define BITOP_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
-#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
-
 /**
  * __set_bit - Set a bit in memory
  * @nr: the bit to set
@@ -17,16 +14,16 @@
  */
 static inline void __set_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 
 	*p |= mask;
 }
 
 static inline void __clear_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 
 	*p &= ~mask;
 }
@@ -42,8 +39,8 @@ static inline void __clear_bit(int nr, volatile unsigned long *addr)
  */
 static inline void __change_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 
 	*p ^= mask;
 }
@@ -59,8 +56,8 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
  */
 static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old = *p;
 
 	*p = old | mask;
@@ -78,8 +75,8 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
  */
 static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old = *p;
 
 	*p = old & ~mask;
@@ -90,8 +87,8 @@ static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
 static inline int __test_and_change_bit(int nr,
 					volatile unsigned long *addr)
 {
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+	unsigned long mask = BIT_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
 	unsigned long old = *p;
 
 	*p = old ^ mask;
@@ -105,7 +102,7 @@ static inline int __test_and_change_bit(int nr,
  */
 static inline int test_bit(int nr, const volatile unsigned long *addr)
 {
-	return 1UL & (addr[BITOP_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
+	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
 }
 
 #endif /* _ASM_GENERIC_BITOPS_NON_ATOMIC_H_ */
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index b9fb8ee3308b..939e80bdbef7 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -2,6 +2,14 @@
 #define _LINUX_BITOPS_H
 #include <asm/types.h>
 
+#ifdef __KERNEL__
+#define BIT_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
+#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
+#define BITS_TO_TYPE(nr, t) (((nr)+(t)-1)/(t))
+#define BITS_TO_LONGS(nr) BITS_TO_TYPE(nr, BITS_PER_LONG)
+#define BITS_PER_BYTE 8
+#endif
+
 /*
  * Include this here because some architectures need generic_ffs/fls in
  * scope
diff --git a/include/linux/types.h b/include/linux/types.h
index 0351bf2fac85..4f0dad21c917 100644
--- a/include/linux/types.h
+++ b/include/linux/types.h
@@ -3,12 +3,9 @@
 
 #ifdef __KERNEL__
 
-#define BITS_TO_LONGS(bits) \
-	(((bits)+BITS_PER_LONG-1)/BITS_PER_LONG)
 #define DECLARE_BITMAP(name,bits) \
 	unsigned long name[BITS_TO_LONGS(bits)]
 
-#define BITS_PER_BYTE 8
 #endif
 
 #include <linux/posix_types.h>
-- 
2.39.2
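
A minimal userspace sketch of how the macros introduced above fit together, for
readers who want to see them in isolation. This is not kernel code: BITS_PER_LONG
is derived here from CHAR_BIT * sizeof(long) as a stand-in for the kernel's
definition, and set_bit_example()/test_bit_example() are hypothetical helpers
that only mirror the shape of __set_bit()/test_bit() from the patch.

/* build with: cc -Wall bitmacros.c && ./a.out */
#include <limits.h>
#include <stdio.h>

/* Stand-in for the kernel's BITS_PER_LONG (assumption for userspace). */
#define BITS_PER_LONG		(CHAR_BIT * sizeof(long))

/* The macros as introduced by this patch in include/linux/bitops.h. */
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_TO_TYPE(nr, t)	(((nr)+(t)-1)/(t))
#define BITS_TO_LONGS(nr)	BITS_TO_TYPE(nr, BITS_PER_LONG)
#define BITS_PER_BYTE		8

/* Same shape as the kernel's DECLARE_BITMAP(), which after this patch
 * relies on BITS_TO_LONGS() living in bitops.h instead of types.h. */
#define DECLARE_BITMAP(name, bits) \
	unsigned long name[BITS_TO_LONGS(bits)]

/* Illustrative, non-atomic helpers mirroring __set_bit()/test_bit(). */
static void set_bit_example(int nr, unsigned long *addr)
{
	addr[BIT_WORD(nr)] |= BIT_MASK(nr);
}

static int test_bit_example(int nr, const unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr % BITS_PER_LONG));
}

int main(void)
{
	DECLARE_BITMAP(map, 100) = { 0 };

	set_bit_example(3, map);
	set_bit_example(67, map);	/* lands in a later word of the bitmap */

	printf("bit 3: %d, bit 66: %d, bit 67: %d\n",
	       test_bit_example(3, map),
	       test_bit_example(66, map),
	       test_bit_example(67, map));
	printf("longs needed for 100 bits: %lu\n",
	       (unsigned long)BITS_TO_LONGS(100));
	return 0;
}

On a 64-bit build this prints "bit 3: 1, bit 66: 0, bit 67: 1" and
"longs needed for 100 bits: 2"; the same source also builds on 32-bit,
where BITS_TO_LONGS(100) evaluates to 4.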