  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. /*
  3. * Cryptographic API.
  4. *
  5. * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
  6. * Supplemental SSE3 instructions.
  7. *
  8. * This file is based on sha1_generic.c
  9. *
  10. * Copyright (c) Alan Smithee.
  11. * Copyright (c) Andrew McDonald <[email protected]>
  12. * Copyright (c) Jean-Francois Dive <[email protected]>
  13. * Copyright (c) Mathias Krause <[email protected]>
  14. * Copyright (c) Chandramouli Narayanan <[email protected]>
  15. */
  16. #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
  17. #include <crypto/internal/hash.h>
  18. #include <crypto/internal/simd.h>
  19. #include <linux/init.h>
  20. #include <linux/module.h>
  21. #include <linux/mm.h>
  22. #include <linux/types.h>
  23. #include <crypto/sha1.h>
  24. #include <crypto/sha1_base.h>
  25. #include <asm/cpu_device_id.h>
  26. #include <asm/simd.h>
/*
 * CPU match table used for module autoloading: udev loads this module on
 * any CPU advertising SSSE3, AVX or AVX2.  Each algorithm still performs
 * its own feature check at registration time.
 */
static const struct x86_cpu_id module_cpu_ids[] = {
	X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_AVX, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
  34. static int sha1_update(struct shash_desc *desc, const u8 *data,
  35. unsigned int len, sha1_block_fn *sha1_xform)
  36. {
  37. struct sha1_state *sctx = shash_desc_ctx(desc);
  38. if (!crypto_simd_usable() ||
  39. (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
  40. return crypto_sha1_update(desc, data, len);
  41. /*
  42. * Make sure struct sha1_state begins directly with the SHA1
  43. * 160-bit internal state, as this is what the asm functions expect.
  44. */
  45. BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);
  46. kernel_fpu_begin();
  47. sha1_base_do_update(desc, data, len, sha1_xform);
  48. kernel_fpu_end();
  49. return 0;
  50. }
/*
 * Common finup path shared by all the SIMD variants: process any trailing
 * @data, then pad and finalize, all inside one FPU section.  Falls back to
 * the generic C implementation when the FPU may not be used here.
 */
static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	/* Copy out the digest and wipe the state; needs no FPU. */
	return sha1_base_finish(desc, out);
}
/* SSSE3 block transform, implemented in assembly (sha1_ssse3_asm.S). */
asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
				     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}
/* SSSE3 variant: baseline SIMD implementation, priority 150. */
static struct shash_alg sha1_ssse3_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.finup		=	sha1_ssse3_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
  95. static int register_sha1_ssse3(void)
  96. {
  97. if (boot_cpu_has(X86_FEATURE_SSSE3))
  98. return crypto_register_shash(&sha1_ssse3_alg);
  99. return 0;
  100. }
  101. static void unregister_sha1_ssse3(void)
  102. {
  103. if (boot_cpu_has(X86_FEATURE_SSSE3))
  104. crypto_unregister_shash(&sha1_ssse3_alg);
  105. }
/* AVX block transform, implemented in assembly (sha1_avx_asm.S). */
asmlinkage void sha1_transform_avx(struct sha1_state *state,
				   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			   unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			  unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

/* Add padding and return the message digest. */
static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}
/* AVX variant: preferred over SSSE3 (priority 160 > 150). */
static struct shash_alg sha1_avx_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx_update,
	.final		=	sha1_avx_final,
	.finup		=	sha1_avx_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
  137. static bool avx_usable(void)
  138. {
  139. if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
  140. if (boot_cpu_has(X86_FEATURE_AVX))
  141. pr_info("AVX detected but unusable.\n");
  142. return false;
  143. }
  144. return true;
  145. }
  146. static int register_sha1_avx(void)
  147. {
  148. if (avx_usable())
  149. return crypto_register_shash(&sha1_avx_alg);
  150. return 0;
  151. }
  152. static void unregister_sha1_avx(void)
  153. {
  154. if (avx_usable())
  155. crypto_unregister_shash(&sha1_avx_alg);
  156. }
  157. #define SHA1_AVX2_BLOCK_OPTSIZE 4 /* optimal 4*64 bytes of SHA1 blocks */
  158. asmlinkage void sha1_transform_avx2(struct sha1_state *state,
  159. const u8 *data, int blocks);
  160. static bool avx2_usable(void)
  161. {
  162. if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
  163. && boot_cpu_has(X86_FEATURE_BMI1)
  164. && boot_cpu_has(X86_FEATURE_BMI2))
  165. return true;
  166. return false;
  167. }
  168. static void sha1_apply_transform_avx2(struct sha1_state *state,
  169. const u8 *data, int blocks)
  170. {
  171. /* Select the optimal transform based on data block size */
  172. if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
  173. sha1_transform_avx2(state, data, blocks);
  174. else
  175. sha1_transform_avx(state, data, blocks);
  176. }
static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

/* Add padding and return the message digest. */
static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}
/* AVX2 variant: preferred over AVX (priority 170 > 160). */
static struct shash_alg sha1_avx2_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx2_update,
	.final		=	sha1_avx2_final,
	.finup		=	sha1_avx2_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
  206. static int register_sha1_avx2(void)
  207. {
  208. if (avx2_usable())
  209. return crypto_register_shash(&sha1_avx2_alg);
  210. return 0;
  211. }
  212. static void unregister_sha1_avx2(void)
  213. {
  214. if (avx2_usable())
  215. crypto_unregister_shash(&sha1_avx2_alg);
  216. }
#ifdef CONFIG_AS_SHA1_NI
/*
 * SHA-NI variant: uses the dedicated SHA instruction-set extension.
 * Only built when the assembler supports the SHA-NI instructions
 * (CONFIG_AS_SHA1_NI); otherwise the register/unregister helpers below
 * the #else collapse to no-ops.
 */
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
				  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			 unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

/* Add padding and return the message digest. */
static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

/* Highest priority (250): beats SSSE3/AVX/AVX2 when SHA-NI is present. */
static struct shash_alg sha1_ni_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ni_update,
	.final		=	sha1_ni_final,
	.finup		=	sha1_ni_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif
  264. static int __init sha1_ssse3_mod_init(void)
  265. {
  266. if (!x86_match_cpu(module_cpu_ids))
  267. return -ENODEV;
  268. if (register_sha1_ssse3())
  269. goto fail;
  270. if (register_sha1_avx()) {
  271. unregister_sha1_ssse3();
  272. goto fail;
  273. }
  274. if (register_sha1_avx2()) {
  275. unregister_sha1_avx();
  276. unregister_sha1_ssse3();
  277. goto fail;
  278. }
  279. if (register_sha1_ni()) {
  280. unregister_sha1_avx2();
  281. unregister_sha1_avx();
  282. unregister_sha1_ssse3();
  283. goto fail;
  284. }
  285. return 0;
  286. fail:
  287. return -ENODEV;
  288. }
/* Unregister all variants in reverse priority order of registration. */
static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}
module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

/* Aliases so the crypto API can autoload this module by algorithm name. */
MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif