// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
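
/*
 * The actual SHA-256 transform lives in assembly (a port of the OpenSSL
 * implementation, per the header above). This scalar version uses only
 * general-purpose registers and is therefore safe to call in any context.
 */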
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);
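
/*
 * Adapt the assembly routine's prototype to the block-transform callback
 * signature expected by the sha256_base_do_update()/_do_finalize() helpers.
 */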
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}
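
/*
 * NEON-accelerated version of the transform; may only be called between
 * kernel_neon_begin() and kernel_neon_end().
 */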
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}
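
/*
 * Scalar shash entry points. These never touch the NEON register file,
 * so they are usable in any context.
 */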
static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				     __sha256_block_data_order);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				      __sha256_block_data_order);
	sha256_base_do_finalize(desc, __sha256_block_data_order);
	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}
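
/*
 * The scalar algorithms are always registered. Their lower cra_priority
 * (125) means the crypto API will prefer the NEON variants (priority 150)
 * whenever both sets are available.
 */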
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
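
/*
 * NEON code may only run when the SIMD unit is usable in the current
 * context (crypto_simd_usable()); otherwise, fall back to the scalar
 * transform.
 */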
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
		kernel_neon_end();

		data += chunk;
		len -= chunk;
	}
	return 0;
}
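
/*
 * Finalization follows the same pattern: use the scalar fallback when the
 * SIMD unit is unusable, otherwise wrap the NEON transform in a
 * kernel_neon_begin()/kernel_neon_end() pair.
 */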
static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}
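
/*
 * NEON-accelerated variants, registered at a higher priority (150) so
 * that they win algorithm selection over the scalar versions above.
 */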
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
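
/*
 * Register the scalar algorithms unconditionally, then the NEON variants
 * if the CPU advertises Advanced SIMD. If the latter fails, unwind the
 * scalar registration as well.
 */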
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);