From 9779048d71b18ea85c067a85b648cebb2d29858e Mon Sep 17 00:00:00 2001
From: Vasily Gorbik
Date: Sun, 14 Jul 2019 20:49:29 +0200
Subject: s390/kasan: add bitops instrumentation

Add KASAN instrumentation of the architecture-specific asm implementation
of bitops. It also covers the s390-specific *_inv functions.

Signed-off-by: Vasily Gorbik
Signed-off-by: Heiko Carstens
---
 arch/s390/include/asm/bitops.h | 65 +++++++++++++++++++++++-------------------
 1 file changed, 35 insertions(+), 30 deletions(-)

diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index 74fafd8baaef..b8833ac983fa 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -56,7 +56,7 @@ __bitops_byte(unsigned long nr, volatile unsigned long *ptr)
 	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
 }
 
-static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
@@ -77,7 +77,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
 	__atomic64_or(mask, (long *)addr);
 }
 
-static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
@@ -98,7 +98,8 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	__atomic64_and(mask, (long *)addr);
 }
 
-static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch_change_bit(unsigned long nr,
+				   volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
@@ -119,8 +120,8 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
 	__atomic64_xor(mask, (long *)addr);
 }
 
-static inline bool
-test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit(unsigned long nr,
+					 volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
@@ -130,8 +131,8 @@ test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (old & mask) != 0;
 }
 
-static inline bool
-test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_clear_bit(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
@@ -141,8 +142,8 @@ test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (old & ~mask) != 0;
 }
 
-static inline bool
-test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_change_bit(unsigned long nr,
+					    volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
@@ -152,30 +153,31 @@ test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (old & mask) != 0;
 }
 
-static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr |= 1 << (nr & 7);
 }
 
-static inline void
-__clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___clear_bit(unsigned long nr,
+				    volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr &= ~(1 << (nr & 7));
 }
 
-static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___change_bit(unsigned long nr,
+				     volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr ^= 1 << (nr & 7);
 }
 
-static inline bool
-__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_set_bit(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
@@ -185,8 +187,8 @@ __test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline bool
-__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_clear_bit(unsigned long nr,
+					     volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
@@ -196,8 +198,8 @@ __test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline bool
-__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_change_bit(unsigned long nr,
+					      volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
@@ -207,7 +209,8 @@ __test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline bool test_bit(unsigned long nr, const volatile unsigned long *ptr)
+static inline bool arch_test_bit(unsigned long nr,
+				 const volatile unsigned long *ptr)
 {
 	const volatile unsigned char *addr;
 
@@ -216,28 +219,30 @@ static inline bool test_bit(unsigned long nr, const volatile unsigned long *ptr)
 	return (*addr >> (nr & 7)) & 1;
 }
 
-static inline bool test_and_set_bit_lock(unsigned long nr,
-					 volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit_lock(unsigned long nr,
+					      volatile unsigned long *ptr)
 {
-	if (test_bit(nr, ptr))
+	if (arch_test_bit(nr, ptr))
 		return 1;
-	return test_and_set_bit(nr, ptr);
+	return arch_test_and_set_bit(nr, ptr);
 }
 
-static inline void clear_bit_unlock(unsigned long nr,
-				    volatile unsigned long *ptr)
+static inline void arch_clear_bit_unlock(unsigned long nr,
+					 volatile unsigned long *ptr)
 {
 	smp_mb__before_atomic();
-	clear_bit(nr, ptr);
+	arch_clear_bit(nr, ptr);
 }
 
-static inline void __clear_bit_unlock(unsigned long nr,
-				      volatile unsigned long *ptr)
+static inline void arch___clear_bit_unlock(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	smp_mb();
-	__clear_bit(nr, ptr);
+	arch___clear_bit(nr, ptr);
 }
 
+#include <asm-generic/bitops-instrumented.h>
+
 /*
  * Functions which use MSB0 bit numbering.
  * The bits are numbered:
-- 
cgit v1.2.3-55-g7522
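
Note: the #include added at the end of the diff pulls in the generic
instrumented bitops wrappers. As a rough sketch (simplified from
include/asm-generic/bitops-instrumented.h as it existed around v5.3, not
quoted from this patch), each public bitop checks the long word it is
about to access with KASAN and then calls the uninstrumented arch_*()
helper that this patch introduces:

/*
 * Simplified illustration of the generic instrumented wrappers.
 * BIT_WORD(nr) is nr / BITS_PER_LONG (from <linux/bitops.h>);
 * kasan_check_write()/kasan_check_read() come from <linux/kasan-checks.h>.
 */
#include <linux/kasan-checks.h>

static inline void set_bit(long nr, volatile unsigned long *addr)
{
	/* Tell KASAN the containing long is about to be written. */
	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
	arch_set_bit(nr, addr);
}

static inline bool test_bit(long nr, const volatile unsigned long *addr)
{
	/* Read-side check before delegating to the arch implementation. */
	kasan_check_read(addr + BIT_WORD(nr), sizeof(long));
	return arch_test_bit(nr, addr);
}

Since the s390 MSB0 helpers mentioned in the commit message (set_bit_inv(),
test_bit_inv(), and friends) are defined in terms of these public entry
points, they inherit the same KASAN checks without further changes.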