/* crc32-ce-glue.c (5.4 KB) */
  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
  4. *
  5. * Copyright (C) 2016 Linaro Ltd <[email protected]>
  6. */
  7. #include <linux/cpufeature.h>
  8. #include <linux/crc32.h>
  9. #include <linux/init.h>
  10. #include <linux/kernel.h>
  11. #include <linux/module.h>
  12. #include <linux/string.h>
  13. #include <crypto/internal/hash.h>
  14. #include <crypto/internal/simd.h>
  15. #include <asm/hwcap.h>
  16. #include <asm/neon.h>
  17. #include <asm/simd.h>
  18. #include <asm/unaligned.h>
#define PMULL_MIN_LEN	64L	/* minimum buffer size for which the PMULL
				 * path (crc32_pmull_le_16) is worthwhile */
#define SCALE_F		16L	/* size of a NEON register in bytes; the PMULL
				 * routines want SCALE_F-aligned, SCALE_F-multiple
				 * input */

/* Assembly implementations: PMULL (carry-less multiply) variants ... */
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
/* ... and variants using the ARMv8 CRC32(C) instructions. */
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);
asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

/*
 * Scalar fallbacks used by the PMULL update paths for unaligned heads and
 * short tails; bound at module init to either the CRC32 instructions or
 * the generic table-driven lib/crc32 code, depending on HWCAP2_CRC32.
 */
static u32 (*fallback_crc32)(u32 init_crc, const u8 buf[], u32 len);
static u32 (*fallback_crc32c)(u32 init_crc, const u8 buf[], u32 len);
  28. static int crc32_cra_init(struct crypto_tfm *tfm)
  29. {
  30. u32 *key = crypto_tfm_ctx(tfm);
  31. *key = 0;
  32. return 0;
  33. }
  34. static int crc32c_cra_init(struct crypto_tfm *tfm)
  35. {
  36. u32 *key = crypto_tfm_ctx(tfm);
  37. *key = ~0;
  38. return 0;
  39. }
  40. static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
  41. unsigned int keylen)
  42. {
  43. u32 *mctx = crypto_shash_ctx(hash);
  44. if (keylen != sizeof(u32))
  45. return -EINVAL;
  46. *mctx = le32_to_cpup((__le32 *)key);
  47. return 0;
  48. }
  49. static int crc32_init(struct shash_desc *desc)
  50. {
  51. u32 *mctx = crypto_shash_ctx(desc->tfm);
  52. u32 *crc = shash_desc_ctx(desc);
  53. *crc = *mctx;
  54. return 0;
  55. }
  56. static int crc32_update(struct shash_desc *desc, const u8 *data,
  57. unsigned int length)
  58. {
  59. u32 *crc = shash_desc_ctx(desc);
  60. *crc = crc32_armv8_le(*crc, data, length);
  61. return 0;
  62. }
  63. static int crc32c_update(struct shash_desc *desc, const u8 *data,
  64. unsigned int length)
  65. {
  66. u32 *crc = shash_desc_ctx(desc);
  67. *crc = crc32c_armv8_le(*crc, data, length);
  68. return 0;
  69. }
  70. static int crc32_final(struct shash_desc *desc, u8 *out)
  71. {
  72. u32 *crc = shash_desc_ctx(desc);
  73. put_unaligned_le32(*crc, out);
  74. return 0;
  75. }
  76. static int crc32c_final(struct shash_desc *desc, u8 *out)
  77. {
  78. u32 *crc = shash_desc_ctx(desc);
  79. put_unaligned_le32(~*crc, out);
  80. return 0;
  81. }
/*
 * crc32_pmull_update - CRC32 update using the NEON/PMULL code path.
 *
 * The PMULL routine consumes SCALE_F (16) bytes per step and is only
 * entered on SCALE_F-aligned data; the scalar fallback handles the
 * unaligned head and any sub-multiple tail. If SIMD is not currently
 * usable (softirq, etc.), the whole buffer goes to the fallback.
 */
static int crc32_pmull_update(struct shash_desc *desc, const u8 *data,
			      unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		/* Align @data to SCALE_F by feeding the head to the fallback. */
		if ((u32)data % SCALE_F) {
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32(*crc, data, l);

			data += l;
			length -= l;
		}

		/* Bulk: only worth entering NEON for >= PMULL_MIN_LEN bytes. */
		if (length >= PMULL_MIN_LEN) {
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	/* Tail (or everything, if SIMD was unusable). */
	if (length > 0)
		*crc = fallback_crc32(*crc, data, length);

	return 0;
}
/*
 * crc32c_pmull_update - CRC32C update using the NEON/PMULL code path.
 *
 * Same head/bulk/tail structure as crc32_pmull_update(), but using the
 * CRC32C polynomial routines and fallback.
 */
static int crc32c_pmull_update(struct shash_desc *desc, const u8 *data,
			       unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		/* Align @data to SCALE_F by feeding the head to the fallback. */
		if ((u32)data % SCALE_F) {
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32c(*crc, data, l);

			data += l;
			length -= l;
		}

		/* Bulk: only worth entering NEON for >= PMULL_MIN_LEN bytes. */
		if (length >= PMULL_MIN_LEN) {
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32c_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	/* Tail (or everything, if SIMD was unusable). */
	if (length > 0)
		*crc = fallback_crc32c(*crc, data, length);

	return 0;
}
/*
 * Algorithm descriptors: [0] is "crc32", [1] is "crc32c". Both start
 * out with the plain ARMv8-instruction update handlers; module init
 * swaps in the PMULL handlers when HWCAP2_PMULL is present.
 */
static struct shash_alg crc32_pmull_algs[] = { {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32_update,
	.final			= crc32_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32_cra_init,
	.base.cra_name		= "crc32",
	.base.cra_driver_name	= "crc32-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
}, {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32c_update,
	.final			= crc32c_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32c_cra_init,
	.base.cra_name		= "crc32c",
	.base.cra_driver_name	= "crc32c-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
} };
/*
 * crc32_pmull_mod_init - pick implementations from HWCAP2 and register.
 *
 * With PMULL: use the PMULL bulk path, with the CRC instructions (if
 * present) or the generic lib/crc32 routines as the scalar fallback.
 * Without PMULL: keep the CRC-instruction handlers already in the
 * descriptor table, but bail out with -ENODEV if those instructions
 * are absent too (nothing here would be accelerated).
 */
static int __init crc32_pmull_mod_init(void)
{
	if (elf_hwcap2 & HWCAP2_PMULL) {
		crc32_pmull_algs[0].update = crc32_pmull_update;
		crc32_pmull_algs[1].update = crc32c_pmull_update;

		if (elf_hwcap2 & HWCAP2_CRC32) {
			fallback_crc32 = crc32_armv8_le;
			fallback_crc32c = crc32c_armv8_le;
		} else {
			fallback_crc32 = crc32_le;
			fallback_crc32c = __crc32c_le;
		}
	} else if (!(elf_hwcap2 & HWCAP2_CRC32)) {
		return -ENODEV;
	}

	return crypto_register_shashes(crc32_pmull_algs,
				       ARRAY_SIZE(crc32_pmull_algs));
}
/* crc32_pmull_mod_exit - unregister both shash algorithms on removal. */
static void __exit crc32_pmull_mod_exit(void)
{
	crypto_unregister_shashes(crc32_pmull_algs,
				  ARRAY_SIZE(crc32_pmull_algs));
}
/*
 * CPU-feature device table so udev autoloads this module on hardware
 * with either the CRC32 instructions or PMULL.
 */
static const struct cpu_feature __maybe_unused crc32_cpu_feature[] = {
	{ cpu_feature(CRC32) }, { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, crc32_cpu_feature);

module_init(crc32_pmull_mod_init);
module_exit(crc32_pmull_mod_exit);
MODULE_AUTHOR("Ard Biesheuvel <[email protected]>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32c");