// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
 */

#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"
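
/* Number of iterations for the randomized encode/decode test below. */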
#define ITERS_PER_TEST 2000

/* Test requirements. */
static bool __init test_requires(void)
{
	/* The RNG must be initialized for the tests below. */
	return get_random_u32() + get_random_u32() != 0;
}

/*
 * Test watchpoint encode and decode: check that encoding an access's info and
 * then decoding it preserves that info.
 */
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = prandom_u32_max(MAX_ENCODABLE_SIZE) + 1;
		bool is_write = !!prandom_u32_max(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;

		get_random_bytes(&addr, sizeof(addr));
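		/*
		 * Clamp to PAGE_SIZE: the test assumes addresses below
		 * PAGE_SIZE are not encodable (see check_encodable()).
		 */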
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);

		/* Check special watchpoints */
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check decoding watchpoint returns same data */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test access matching function. */
static bool __init test_matching_access(void)
{
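	/* Identical and overlapping ranges must match. */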
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
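
	/* Adjacent but disjoint ranges must not match. */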
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: a simple boot-time test to check that certain barriers are
 * always properly instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);

static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;
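
	/*
	 * The macro below sets up a dummy scoped reorder_access of size 1 and
	 * then executes a barrier: a properly instrumented barrier must flush
	 * the pending access, i.e. reset its size to 0. A size that survives
	 * the barrier indicates missing instrumentation.
	 */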
#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)				\
	do {										\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;		\
		reorder_access->size = 1;						\
		barrier;								\
		if (reorder_access->size != 0) {					\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n"); \
			ret = false;							\
		}									\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)

	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */
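
	/* First pass: a pending plain read must be flushed by these barriers. */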
	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));
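
	/* Second pass: repeat with a pending plain write. */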
	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));
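
	/* Third pass: repeat with a pending compound read-write access. */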
	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));

#ifdef clear_bit_unlock_is_negative_byte
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
#endif
	kcsan_nestable_atomic_end();

	return ret;
}

static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;
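
	/* Run a test, counting it toward the pass/total tally. */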
#define RUN_TEST(do_test)						\
	do {								\
		++total;						\
		if (do_test())						\
			++passed;					\
		else							\
			pr_err("selftest: " #do_test " failed\n");	\
	} while (0)

	RUN_TEST(test_requires);
	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");
	return 0;
}
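
/* Register at postcore level so the selftests run early during boot. */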
postcore_initcall(kcsan_selftest);