// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto-API module for CRC-32 algorithms implemented with the
 * z/Architecture Vector Extension Facility.
 *
 * Copyright IBM Corp. 2015
 * Author(s): Hendrik Brueckner <[email protected]>
 */
  9. #define KMSG_COMPONENT "crc32-vx"
  10. #define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
  11. #include <linux/module.h>
  12. #include <linux/cpufeature.h>
  13. #include <linux/crc32.h>
  14. #include <crypto/internal/hash.h>
  15. #include <asm/fpu/api.h>
  16. #define CRC32_BLOCK_SIZE 1
  17. #define CRC32_DIGEST_SIZE 4
  18. #define VX_MIN_LEN 64
  19. #define VX_ALIGNMENT 16L
  20. #define VX_ALIGN_MASK (VX_ALIGNMENT - 1)
  21. struct crc_ctx {
  22. u32 key;
  23. };
  24. struct crc_desc_ctx {
  25. u32 crc;
  26. };
  27. /* Prototypes for functions in assembly files */
  28. u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
  29. u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
  30. u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
  31. /*
  32. * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
  33. *
  34. * Creates a function to perform a particular CRC-32 computation. Depending
  35. * on the message buffer, the hardware-accelerated or software implementation
  36. * is used. Note that the message buffer is aligned to improve fetch
  37. * operations of VECTOR LOAD MULTIPLE instructions.
  38. *
  39. */
  40. #define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw) \
  41. static u32 __pure ___fname(u32 crc, \
  42. unsigned char const *data, size_t datalen) \
  43. { \
  44. struct kernel_fpu vxstate; \
  45. unsigned long prealign, aligned, remaining; \
  46. \
  47. if (datalen < VX_MIN_LEN + VX_ALIGN_MASK) \
  48. return ___crc32_sw(crc, data, datalen); \
  49. \
  50. if ((unsigned long)data & VX_ALIGN_MASK) { \
  51. prealign = VX_ALIGNMENT - \
  52. ((unsigned long)data & VX_ALIGN_MASK); \
  53. datalen -= prealign; \
  54. crc = ___crc32_sw(crc, data, prealign); \
  55. data = (void *)((unsigned long)data + prealign); \
  56. } \
  57. \
  58. aligned = datalen & ~VX_ALIGN_MASK; \
  59. remaining = datalen & VX_ALIGN_MASK; \
  60. \
  61. kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW); \
  62. crc = ___crc32_vx(crc, data, aligned); \
  63. kernel_fpu_end(&vxstate, KERNEL_VXR_LOW); \
  64. \
  65. if (remaining) \
  66. crc = ___crc32_sw(crc, data + aligned, remaining); \
  67. \
  68. return crc; \
  69. }
  70. DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
  71. DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
  72. DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)
  73. static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
  74. {
  75. struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
  76. mctx->key = 0;
  77. return 0;
  78. }
  79. static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
  80. {
  81. struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
  82. mctx->key = ~0;
  83. return 0;
  84. }
  85. static int crc32_vx_init(struct shash_desc *desc)
  86. {
  87. struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
  88. struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
  89. ctx->crc = mctx->key;
  90. return 0;
  91. }
  92. static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
  93. unsigned int newkeylen)
  94. {
  95. struct crc_ctx *mctx = crypto_shash_ctx(tfm);
  96. if (newkeylen != sizeof(mctx->key))
  97. return -EINVAL;
  98. mctx->key = le32_to_cpu(*(__le32 *)newkey);
  99. return 0;
  100. }
  101. static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
  102. unsigned int newkeylen)
  103. {
  104. struct crc_ctx *mctx = crypto_shash_ctx(tfm);
  105. if (newkeylen != sizeof(mctx->key))
  106. return -EINVAL;
  107. mctx->key = be32_to_cpu(*(__be32 *)newkey);
  108. return 0;
  109. }
  110. static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
  111. {
  112. struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
  113. *(__le32 *)out = cpu_to_le32p(&ctx->crc);
  114. return 0;
  115. }
  116. static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
  117. {
  118. struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
  119. *(__be32 *)out = cpu_to_be32p(&ctx->crc);
  120. return 0;
  121. }
  122. static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
  123. {
  124. struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
  125. /*
  126. * Perform a final XOR with 0xFFFFFFFF to be in sync
  127. * with the generic crc32c shash implementation.
  128. */
  129. *(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
  130. return 0;
  131. }
  132. static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
  133. u8 *out)
  134. {
  135. *(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
  136. return 0;
  137. }
  138. static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
  139. u8 *out)
  140. {
  141. *(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
  142. return 0;
  143. }
  144. static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
  145. u8 *out)
  146. {
  147. /*
  148. * Perform a final XOR with 0xFFFFFFFF to be in sync
  149. * with the generic crc32c shash implementation.
  150. */
  151. *(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
  152. return 0;
  153. }
  154. #define CRC32_VX_FINUP(alg, func) \
  155. static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data, \
  156. unsigned int datalen, u8 *out) \
  157. { \
  158. return __ ## alg ## _vx_finup(shash_desc_ctx(desc), \
  159. data, datalen, out); \
  160. }
  161. CRC32_VX_FINUP(crc32le, crc32_le_vx)
  162. CRC32_VX_FINUP(crc32be, crc32_be_vx)
  163. CRC32_VX_FINUP(crc32c, crc32c_le_vx)
  164. #define CRC32_VX_DIGEST(alg, func) \
  165. static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
  166. unsigned int len, u8 *out) \
  167. { \
  168. return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm), \
  169. data, len, out); \
  170. }
  171. CRC32_VX_DIGEST(crc32le, crc32_le_vx)
  172. CRC32_VX_DIGEST(crc32be, crc32_be_vx)
  173. CRC32_VX_DIGEST(crc32c, crc32c_le_vx)
  174. #define CRC32_VX_UPDATE(alg, func) \
  175. static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
  176. unsigned int datalen) \
  177. { \
  178. struct crc_desc_ctx *ctx = shash_desc_ctx(desc); \
  179. ctx->crc = func(ctx->crc, data, datalen); \
  180. return 0; \
  181. }
  182. CRC32_VX_UPDATE(crc32le, crc32_le_vx)
  183. CRC32_VX_UPDATE(crc32be, crc32_be_vx)
  184. CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
  185. static struct shash_alg crc32_vx_algs[] = {
  186. /* CRC-32 LE */
  187. {
  188. .init = crc32_vx_init,
  189. .setkey = crc32_vx_setkey,
  190. .update = crc32le_vx_update,
  191. .final = crc32le_vx_final,
  192. .finup = crc32le_vx_finup,
  193. .digest = crc32le_vx_digest,
  194. .descsize = sizeof(struct crc_desc_ctx),
  195. .digestsize = CRC32_DIGEST_SIZE,
  196. .base = {
  197. .cra_name = "crc32",
  198. .cra_driver_name = "crc32-vx",
  199. .cra_priority = 200,
  200. .cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
  201. .cra_blocksize = CRC32_BLOCK_SIZE,
  202. .cra_ctxsize = sizeof(struct crc_ctx),
  203. .cra_module = THIS_MODULE,
  204. .cra_init = crc32_vx_cra_init_zero,
  205. },
  206. },
  207. /* CRC-32 BE */
  208. {
  209. .init = crc32_vx_init,
  210. .setkey = crc32be_vx_setkey,
  211. .update = crc32be_vx_update,
  212. .final = crc32be_vx_final,
  213. .finup = crc32be_vx_finup,
  214. .digest = crc32be_vx_digest,
  215. .descsize = sizeof(struct crc_desc_ctx),
  216. .digestsize = CRC32_DIGEST_SIZE,
  217. .base = {
  218. .cra_name = "crc32be",
  219. .cra_driver_name = "crc32be-vx",
  220. .cra_priority = 200,
  221. .cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
  222. .cra_blocksize = CRC32_BLOCK_SIZE,
  223. .cra_ctxsize = sizeof(struct crc_ctx),
  224. .cra_module = THIS_MODULE,
  225. .cra_init = crc32_vx_cra_init_zero,
  226. },
  227. },
  228. /* CRC-32C LE */
  229. {
  230. .init = crc32_vx_init,
  231. .setkey = crc32_vx_setkey,
  232. .update = crc32c_vx_update,
  233. .final = crc32c_vx_final,
  234. .finup = crc32c_vx_finup,
  235. .digest = crc32c_vx_digest,
  236. .descsize = sizeof(struct crc_desc_ctx),
  237. .digestsize = CRC32_DIGEST_SIZE,
  238. .base = {
  239. .cra_name = "crc32c",
  240. .cra_driver_name = "crc32c-vx",
  241. .cra_priority = 200,
  242. .cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
  243. .cra_blocksize = CRC32_BLOCK_SIZE,
  244. .cra_ctxsize = sizeof(struct crc_ctx),
  245. .cra_module = THIS_MODULE,
  246. .cra_init = crc32_vx_cra_init_invert,
  247. },
  248. },
  249. };
  250. static int __init crc_vx_mod_init(void)
  251. {
  252. return crypto_register_shashes(crc32_vx_algs,
  253. ARRAY_SIZE(crc32_vx_algs));
  254. }
  255. static void __exit crc_vx_mod_exit(void)
  256. {
  257. crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
  258. }
  259. module_cpu_feature_match(S390_CPU_FEATURE_VXRS, crc_vx_mod_init);
  260. module_exit(crc_vx_mod_exit);
  261. MODULE_AUTHOR("Hendrik Brueckner <[email protected]>");
  262. MODULE_LICENSE("GPL");
  263. MODULE_ALIAS_CRYPTO("crc32");
  264. MODULE_ALIAS_CRYPTO("crc32-vx");
  265. MODULE_ALIAS_CRYPTO("crc32c");
  266. MODULE_ALIAS_CRYPTO("crc32c-vx");