// SPDX-License-Identifier: GPL-2.0
/*
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015, 2019 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <[email protected]>
 *
 * Extended by Daniel Axtens <[email protected]> to replace the fallback
 * mechanism. The new approach is based on arm64 code, which is:
 * Copyright (C) 2014 - 2018 Linaro Ltd. <[email protected]>
 */
#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/string.h>
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/ghash.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/b128ops.h>
#include "aesp8-ppc.h"
/*
 * Assembly routines (see aesp8-ppc.h).  gcm_init_p8() expands the raw
 * hash key into htable; gcm_gmult_p8() multiplies Xi by H in place;
 * gcm_ghash_p8() folds `len` bytes of `in` into Xi.
 * NOTE(review): callers in this file only ever pass `len` as a multiple
 * of GHASH_BLOCK_SIZE — presumably the asm requires that; confirm.
 */
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);
/*
 * Per-tfm key material: the expanded key table consumed by the VSX
 * assembly, plus the raw key for the gf128mul_lle() software path used
 * when SIMD is not usable.
 */
struct p8_ghash_ctx {
	/* key used by vector asm */
	u128 htable[16];
	/* key used by software fallback */
	be128 key;
};
/*
 * Per-request state: the running digest plus a staging buffer for input
 * that does not arrive in whole 16-byte blocks.
 */
struct p8_ghash_desc_ctx {
	u64 shash[2];			/* current digest (Xi) */
	u8 buffer[GHASH_DIGEST_SIZE];	/* partial-block staging area */
	int bytes;			/* valid bytes in buffer */
};
  41. static int p8_ghash_init(struct shash_desc *desc)
  42. {
  43. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  44. dctx->bytes = 0;
  45. memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
  46. return 0;
  47. }
/*
 * Install a 16-byte hash key.  The key is expanded into ctx->htable for
 * the VSX assembly path and also kept verbatim in ctx->key for the
 * gf128mul_lle() software fallback.  Returns -EINVAL for any other key
 * length.
 */
static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/*
	 * Keep preemption and page faults off for the whole window in
	 * which the vector unit is enabled; the sequence and ordering of
	 * these calls is deliberate.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * Fold the single full block held in dctx->buffer into the running
 * digest: VSX assembly when SIMD is usable, generic gf128mul otherwise.
 */
static inline void __ghash_block(struct p8_ghash_ctx *ctx,
				 struct p8_ghash_desc_ctx *dctx)
{
	if (crypto_simd_usable()) {
		/* Bracket VSX use with preempt/pagefault disable. */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		gcm_ghash_p8(dctx->shash, ctx->htable,
			     dctx->buffer, GHASH_DIGEST_SIZE);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	} else {
		/* Xi = (Xi ^ block) * H */
		crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);
		gf128mul_lle((be128 *)dctx->shash, &ctx->key);
	}
}
  81. static inline void __ghash_blocks(struct p8_ghash_ctx *ctx,
  82. struct p8_ghash_desc_ctx *dctx,
  83. const u8 *src, unsigned int srclen)
  84. {
  85. if (crypto_simd_usable()) {
  86. preempt_disable();
  87. pagefault_disable();
  88. enable_kernel_vsx();
  89. gcm_ghash_p8(dctx->shash, ctx->htable,
  90. src, srclen);
  91. disable_kernel_vsx();
  92. pagefault_enable();
  93. preempt_enable();
  94. } else {
  95. while (srclen >= GHASH_BLOCK_SIZE) {
  96. crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE);
  97. gf128mul_lle((be128 *)dctx->shash, &ctx->key);
  98. srclen -= GHASH_BLOCK_SIZE;
  99. src += GHASH_BLOCK_SIZE;
  100. }
  101. }
  102. }
  103. static int p8_ghash_update(struct shash_desc *desc,
  104. const u8 *src, unsigned int srclen)
  105. {
  106. unsigned int len;
  107. struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
  108. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  109. if (dctx->bytes) {
  110. if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
  111. memcpy(dctx->buffer + dctx->bytes, src,
  112. srclen);
  113. dctx->bytes += srclen;
  114. return 0;
  115. }
  116. memcpy(dctx->buffer + dctx->bytes, src,
  117. GHASH_DIGEST_SIZE - dctx->bytes);
  118. __ghash_block(ctx, dctx);
  119. src += GHASH_DIGEST_SIZE - dctx->bytes;
  120. srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
  121. dctx->bytes = 0;
  122. }
  123. len = srclen & ~(GHASH_DIGEST_SIZE - 1);
  124. if (len) {
  125. __ghash_blocks(ctx, dctx, src, len);
  126. src += len;
  127. srclen -= len;
  128. }
  129. if (srclen) {
  130. memcpy(dctx->buffer, src, srclen);
  131. dctx->bytes = srclen;
  132. }
  133. return 0;
  134. }
  135. static int p8_ghash_final(struct shash_desc *desc, u8 *out)
  136. {
  137. int i;
  138. struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
  139. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  140. if (dctx->bytes) {
  141. for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
  142. dctx->buffer[i] = 0;
  143. __ghash_block(ctx, dctx);
  144. dctx->bytes = 0;
  145. }
  146. memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
  147. return 0;
  148. }
  149. struct shash_alg p8_ghash_alg = {
  150. .digestsize = GHASH_DIGEST_SIZE,
  151. .init = p8_ghash_init,
  152. .update = p8_ghash_update,
  153. .final = p8_ghash_final,
  154. .setkey = p8_ghash_setkey,
  155. .descsize = sizeof(struct p8_ghash_desc_ctx)
  156. + sizeof(struct ghash_desc_ctx),
  157. .base = {
  158. .cra_name = "ghash",
  159. .cra_driver_name = "p8_ghash",
  160. .cra_priority = 1000,
  161. .cra_blocksize = GHASH_BLOCK_SIZE,
  162. .cra_ctxsize = sizeof(struct p8_ghash_ctx),
  163. .cra_module = THIS_MODULE,
  164. },
  165. };