/* archrandom.h */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef _ASM_ARCHRANDOM_H
  3. #define _ASM_ARCHRANDOM_H
  4. #include <linux/arm-smccc.h>
  5. #include <linux/bug.h>
  6. #include <linux/kernel.h>
  7. #include <asm/cpufeature.h>
  8. #define ARM_SMCCC_TRNG_MIN_VERSION 0x10000UL
  9. extern bool smccc_trng_available;
  10. static inline bool __init smccc_probe_trng(void)
  11. {
  12. struct arm_smccc_res res;
  13. arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_VERSION, &res);
  14. if ((s32)res.a0 < 0)
  15. return false;
  16. return res.a0 >= ARM_SMCCC_TRNG_MIN_VERSION;
  17. }
/*
 * Read one 64-bit random value from the RNDR register into *v.
 *
 * Returns true on success.  *v is always written by the asm (it is an
 * output operand); on failure its contents must not be used.
 */
static inline bool __arm64_rndr(unsigned long *v)
{
	bool ok;

	/*
	 * Reads of RNDR set PSTATE.NZCV to 0b0000 on success,
	 * and set PSTATE.NZCV to 0b0100 otherwise.
	 * Hence "cset ..., ne" yields 1 exactly when the read succeeded
	 * (Z clear).
	 */
	asm volatile(
		__mrs_s("%0", SYS_RNDR_EL0) "\n"
	"	cset %w1, ne\n"
	: "=r" (*v), "=r" (ok)
	:
	: "cc");

	return ok;
}
/*
 * Read one 64-bit random value from the RNDRRS register into *v.
 * RNDRRS differs from RNDR in that the underlying DRBG is reseeded
 * before the value is produced (see the comment in
 * arch_get_random_seed_longs()).
 *
 * Returns true on success.  *v is always written by the asm (it is an
 * output operand); on failure its contents must not be used.
 */
static inline bool __arm64_rndrrs(unsigned long *v)
{
	bool ok;

	/*
	 * Reads of RNDRRS set PSTATE.NZCV to 0b0000 on success,
	 * and set PSTATE.NZCV to 0b0100 otherwise.
	 * Hence "cset ..., ne" yields 1 exactly when the read succeeded
	 * (Z clear).
	 */
	asm volatile(
		__mrs_s("%0", SYS_RNDRRS_EL0) "\n"
	"	cset %w1, ne\n"
	: "=r" (*v), "=r" (ok)
	:
	: "cc");

	return ok;
}
  48. static inline size_t __must_check arch_get_random_longs(unsigned long *v, size_t max_longs)
  49. {
  50. /*
  51. * Only support the generic interface after we have detected
  52. * the system wide capability, avoiding complexity with the
  53. * cpufeature code and with potential scheduling between CPUs
  54. * with and without the feature.
  55. */
  56. if (max_longs && cpus_have_const_cap(ARM64_HAS_RNG) && __arm64_rndr(v))
  57. return 1;
  58. return 0;
  59. }
/*
 * Fill @v with up to @max_longs longs of seed-grade random material.
 *
 * Returns the number of longs actually stored (0 on failure or when
 * @max_longs is 0).  Tries the firmware TRNG service first, then falls
 * back to the RNDRRS register.
 */
static inline size_t __must_check arch_get_random_seed_longs(unsigned long *v, size_t max_longs)
{
	if (!max_longs)
		return 0;

	/*
	 * We prefer the SMCCC call, since its semantics (return actual
	 * hardware backed entropy) is closer to the idea behind this
	 * function here than what even the RNDRRS register provides
	 * (the output of a pseudo RNG freshly seeded by a TRNG).
	 */
	if (smccc_trng_available) {
		struct arm_smccc_res res;

		/* One RND64 call delivers at most 192 bits == 3 longs. */
		max_longs = min_t(size_t, 3, max_longs);
		/* The amount of entropy is requested in bits. */
		arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, max_longs * 64, &res);
		/* Negative a0 (as int) indicates a firmware error. */
		if ((int)res.a0 >= 0) {
			/*
			 * The entropy is handed back in a3 first; larger
			 * requests additionally fill a2 and then a1, hence
			 * the deliberate fallthrough below.
			 */
			switch (max_longs) {
			case 3:
				*v++ = res.a1;
				fallthrough;
			case 2:
				*v++ = res.a2;
				fallthrough;
			case 1:
				*v++ = res.a3;
				break;
			}
			return max_longs;
		}
	}

	/*
	 * RNDRRS is not backed by an entropy source but by a DRBG that is
	 * reseeded after each invocation. This is not a 100% fit but good
	 * enough to implement this API if no other entropy source exists.
	 */
	if (cpus_have_const_cap(ARM64_HAS_RNG) && __arm64_rndrrs(v))
		return 1;

	return 0;
}
  98. static inline bool __init __early_cpu_has_rndr(void)
  99. {
  100. /* Open code as we run prior to the first call to cpufeature. */
  101. unsigned long ftr = read_sysreg_s(SYS_ID_AA64ISAR0_EL1);
  102. return (ftr >> ID_AA64ISAR0_EL1_RNDR_SHIFT) & 0xf;
  103. }
/*
 * Boot-time variant of arch_get_random_seed_longs(): usable before the
 * cpufeature framework is up, so the RNDR capability is probed directly
 * on the local CPU instead of via cpus_have_const_cap().
 *
 * Returns the number of longs stored in @v (0 on failure or when
 * @max_longs is 0).  Must only be called while system_state is
 * SYSTEM_BOOTING (enforced by the WARN_ON below).
 */
static inline size_t __init __must_check
arch_get_random_seed_longs_early(unsigned long *v, size_t max_longs)
{
	WARN_ON(system_state != SYSTEM_BOOTING);

	if (!max_longs)
		return 0;

	if (smccc_trng_available) {
		struct arm_smccc_res res;

		/* One RND64 call delivers at most 192 bits == 3 longs. */
		max_longs = min_t(size_t, 3, max_longs);
		/* The amount of entropy is requested in bits. */
		arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, max_longs * 64, &res);
		/* Negative a0 (as int) indicates a firmware error. */
		if ((int)res.a0 >= 0) {
			/*
			 * The entropy is handed back in a3 first; larger
			 * requests additionally fill a2 and then a1, hence
			 * the deliberate fallthrough below.
			 */
			switch (max_longs) {
			case 3:
				*v++ = res.a1;
				fallthrough;
			case 2:
				*v++ = res.a2;
				fallthrough;
			case 1:
				*v++ = res.a3;
				break;
			}
			return max_longs;
		}
	}

	/* Fall back to RNDR, checking the CPU's ID register directly. */
	if (__early_cpu_has_rndr() && __arm64_rndr(v))
		return 1;

	return 0;
}
  133. #define arch_get_random_seed_longs_early arch_get_random_seed_longs_early
  134. #endif /* _ASM_ARCHRANDOM_H */