arm64: instrument smp_{load_acquire,store_release}
author: Mark Rutland <mark.rutland@arm.com>
Tue, 9 Apr 2019 11:13:13 +0000 (12:13 +0100)
committer: Will Deacon <will.deacon@arm.com>
Tue, 16 Apr 2019 15:28:00 +0000 (16:28 +0100)
Our __smp_store_release() and __smp_load_acquire() macros use inline
assembly, which is opaque to kasan. This means that kasan can't catch
erroneous use of these.

This patch adds kasan instrumentation to both.

Cc: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
[will: consistently use *p as argument to sizeof]
Signed-off-by: Will Deacon <will.deacon@arm.com>
arch/arm64/include/asm/barrier.h

index f66bb04fdf2dd3cc21f79a36e3d2d79115c7f670..85b6bedbcc68037a907f462e1c63691c4f5d26de 100644 (file)
@@ -20,6 +20,8 @@
 
 #ifndef __ASSEMBLY__
 
+#include <linux/kasan-checks.h>
+
 #define __nops(n)      ".rept  " #n "\nnop\n.endr\n"
 #define nops(n)                asm volatile(__nops(n))
 
@@ -72,31 +74,33 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
 
 #define __smp_store_release(p, v)                                      \
 do {                                                                   \
+       typeof(p) __p = (p);                                            \
        union { typeof(*p) __val; char __c[1]; } __u =                  \
-               { .__val = (__force typeof(*p)) (v) };                  \
+               { .__val = (__force typeof(*p)) (v) };                  \
        compiletime_assert_atomic_type(*p);                             \
+       kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
-                               : "=Q" (*p)                             \
+                               : "=Q" (*__p)                           \
                                : "r" (*(__u8 *)__u.__c)                \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
-                               : "=Q" (*p)                             \
+                               : "=Q" (*__p)                           \
                                : "r" (*(__u16 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
-                               : "=Q" (*p)                             \
+                               : "=Q" (*__p)                           \
                                : "r" (*(__u32 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
-                               : "=Q" (*p)                             \
+                               : "=Q" (*__p)                           \
                                : "r" (*(__u64 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
@@ -106,27 +110,29 @@ do {                                                                      \
 #define __smp_load_acquire(p)                                          \
 ({                                                                     \
        union { typeof(*p) __val; char __c[1]; } __u;                   \
+       typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
+       kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
-                       : "Q" (*p) : "memory");                         \
+                       : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
-                       : "Q" (*p) : "memory");                         \
+                       : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
-                       : "Q" (*p) : "memory");                         \
+                       : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
-                       : "Q" (*p) : "memory");                         \
+                       : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        __u.__val;                                                      \