kcsan: selftest: Add test case to check memory barrier instrumentation
author	Marco Elver <elver@google.com>
	Tue, 30 Nov 2021 11:44:21 +0000 (12:44 +0100)
committer	Paul E. McKenney <paulmck@kernel.org>
	Fri, 10 Dec 2021 00:42:27 +0000 (16:42 -0800)
Memory barrier instrumentation is crucial to avoiding false positives. To
avoid surprises, run a simple test case in the boot-time selftest to
ensure memory barriers are still instrumented correctly.
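
The test plants a dummy "reorder access" in the current task's KCSAN
context, executes a barrier, and then checks that the barrier's
instrumentation consumed the access. A minimal sketch of the idea,
assuming CONFIG_KCSAN_WEAK_MEMORY=y (the real test below wraps this in a
macro and covers many barrier variants):

	/* Plant a pending access that an instrumented barrier must consume. */
	current->kcsan_ctx.reorder_access.type = KCSAN_ACCESS_SCOPED;
	current->kcsan_ctx.reorder_access.size = 1;
	smp_mb(); /* instrumentation should reset size to 0 */
	if (current->kcsan_ctx.reorder_access.size != 0)
		pr_err("smp_mb() not instrumented\n");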

Signed-off-by: Marco Elver <elver@google.com>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
kernel/kcsan/Makefile
kernel/kcsan/selftest.c

kernel/kcsan/Makefile
index c2bb07f..ff47e89 100644
@@ -11,6 +11,9 @@ CFLAGS_core.o := $(call cc-option,-fno-conserve-stack) \
        -fno-stack-protector -DDISABLE_BRANCH_PROFILING
 
 obj-y := core.o debugfs.o report.o
+
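+# Instrument barriers in selftest.o, so that test_barrier() can check them.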
+KCSAN_INSTRUMENT_BARRIERS_selftest.o := y
 obj-$(CONFIG_KCSAN_SELFTEST) += selftest.o
 
 CFLAGS_kcsan_test.o := $(CFLAGS_KCSAN) -g -fno-omit-frame-pointer
kernel/kcsan/selftest.c
index b4295a3..08c6b84 100644
@@ -7,10 +7,15 @@
 
 #define pr_fmt(fmt) "kcsan: " fmt
 
+#include <linux/atomic.h>
+#include <linux/bitops.h>
 #include <linux/init.h>
+#include <linux/kcsan-checks.h>
 #include <linux/kernel.h>
 #include <linux/printk.h>
 #include <linux/random.h>
+#include <linux/sched.h>
+#include <linux/spinlock.h>
 #include <linux/types.h>
 
 #include "encoding.h"
@@ -103,6 +108,147 @@ static bool __init test_matching_access(void)
        return true;
 }
 
+/*
+ * Correct memory barrier instrumentation is critical to avoiding false
+ * positives: a simple boot-time test checks that certain barriers are always
+ * properly instrumented. See kcsan_test for a more complete test.
+ */
+static bool __init test_barrier(void)
+{
+#ifdef CONFIG_KCSAN_WEAK_MEMORY
+       struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
+#else
+       struct kcsan_scoped_access *reorder_access = NULL;
+#endif
+       bool ret = true;
+       arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
+       DEFINE_SPINLOCK(spinlock);
+       atomic_t dummy;
+       long test_var;
+
+       if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
+               return true;
+
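+/*
+ * Plant a dummy reorder access in the current task, execute the barrier,
+ * then check whether the barrier's instrumentation consumed the access by
+ * resetting its size to 0; an uninstrumented barrier leaves the size set.
+ */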
+#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)                                      \
+       do {                                                                                    \
+               reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;                     \
+               reorder_access->size = 1;                                                       \
+               barrier;                                                                        \
+               if (reorder_access->size != 0) {                                                \
+                       pr_err("improperly instrumented type=(" #access_type "): " name "\n");  \
+                       ret = false;                                                            \
+               }                                                                               \
+       } while (0)
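+/* Check a barrier against a pending read, write, or read-write access. */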
+#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
+#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
+#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)
+
+       kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */
+
+       KCSAN_CHECK_READ_BARRIER(mb());
+       KCSAN_CHECK_READ_BARRIER(rmb());
+       KCSAN_CHECK_READ_BARRIER(smp_mb());
+       KCSAN_CHECK_READ_BARRIER(smp_rmb());
+       KCSAN_CHECK_READ_BARRIER(dma_rmb());
+       KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
+       KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
+       KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
+       KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
+       KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
+       KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
+       KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
+       KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0, 0));
+       KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0, 0));
+       KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
+       KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
+       KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
+       KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
+       KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
+       KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
+       KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
+       KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
+       KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_READ_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
+       arch_spin_lock(&arch_spinlock);
+       KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
+       spin_lock(&spinlock);
+       KCSAN_CHECK_READ_BARRIER(spin_unlock(&spinlock));
+
+       KCSAN_CHECK_WRITE_BARRIER(mb());
+       KCSAN_CHECK_WRITE_BARRIER(wmb());
+       KCSAN_CHECK_WRITE_BARRIER(smp_mb());
+       KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
+       KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
+       KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
+       KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
+       KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
+       KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
+       KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
+       KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
+       KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
+       KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0, 0));
+       KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0));
+       KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
+       KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
+       KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
+       KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
+       KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
+       KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
+       KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
+       KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
+       KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
+       arch_spin_lock(&arch_spinlock);
+       KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
+       spin_lock(&spinlock);
+       KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&spinlock));
+
+       KCSAN_CHECK_RW_BARRIER(mb());
+       KCSAN_CHECK_RW_BARRIER(wmb());
+       KCSAN_CHECK_RW_BARRIER(rmb());
+       KCSAN_CHECK_RW_BARRIER(smp_mb());
+       KCSAN_CHECK_RW_BARRIER(smp_wmb());
+       KCSAN_CHECK_RW_BARRIER(smp_rmb());
+       KCSAN_CHECK_RW_BARRIER(dma_wmb());
+       KCSAN_CHECK_RW_BARRIER(dma_rmb());
+       KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
+       KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
+       KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
+       KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
+       KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
+       KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
+       KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
+       KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0, 0));
+       KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0, 0));
+       KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
+       KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
+       KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
+       KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
+       KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
+       KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
+       KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
+       KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
+       KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
+       KCSAN_CHECK_RW_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
+       arch_spin_lock(&arch_spinlock);
+       KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
+       spin_lock(&spinlock);
+       KCSAN_CHECK_RW_BARRIER(spin_unlock(&spinlock));
+
+       kcsan_nestable_atomic_end();
+
+       return ret;
+}
+
 static int __init kcsan_selftest(void)
 {
        int passed = 0;
@@ -120,6 +266,7 @@ static int __init kcsan_selftest(void)
        RUN_TEST(test_requires);
        RUN_TEST(test_encode_decode);
        RUN_TEST(test_matching_access);
+       RUN_TEST(test_barrier);
 
        pr_info("selftest: %d/%d tests passed\n", passed, total);
        if (passed != total)