aes-ce-ccm-glue.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

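/*
 * Prototypes for the low-level AES-CCM routines implemented in the companion
 * assembly file (aes-ce-ccm-core.S in the kernel tree). They use the ARMv8
 * Crypto Extensions, so every call must be bracketed by kernel_neon_begin()
 * and kernel_neon_end().
 */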
asmlinkage u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				    u32 macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

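/*
 * CCM (RFC 3610) only defines even tag lengths between 4 and 16 bytes; the
 * check below rejects odd or too-short values, while the upper bound is
 * enforced through the .maxauthsize field of the algorithm definition.
 */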
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

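/*
 * Construct the B0 block that CBC-MACs the message length, and reuse
 * req->iv as the initial counter block A0 by zeroing its length field.
 *
 * Worked example (illustrative values, not taken from the code): with a
 * 13-byte nonce (iv[0] == 1, so L == 2), an 8-byte tag and non-empty AAD,
 * the flags byte becomes
 *	0x01 | ((8 - 2) << 2) | 0x40 == 0x59
 * which matches RFC 3610's 64*Adata + 8*M' + L' with M' == 3 and L' == 1.
 */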
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

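/*
 * Feed the associated data into the CBC-MAC. Per RFC 3610, the AAD is
 * prepended with an encoding of its length: two big-endian bytes if it is
 * shorter than 0xff00, otherwise the marker 0xfffe followed by a four-byte
 * big-endian length. (The 0xffff marker for 64-bit lengths is never needed
 * here, since assoclen is a u32.)
 */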
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
				    ctx->key_enc, num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		n = min_t(u32, n, SZ_4K); /* yield NEON at least every 4k */
		p = scatterwalk_map(&walk);

		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
					    num_rounds(ctx));

		if (len / SZ_4K > (len - n) / SZ_4K) {
			kernel_neon_end();
			kernel_neon_begin();
		}

		len -= n;
		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

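/*
 * The walk below handles only whole AES blocks on intermediate steps: a
 * partial block at the end of a step is handed back via skcipher_walk_done()
 * and re-presented in the next one, so only the final step (walk.nbytes ==
 * walk.total) may pass a partial trailing block to the assembly routine.
 * kernel_neon_end()/kernel_neon_begin() bracket each step so the NEON unit
 * is not held across the walk machinery, which may sleep given that the
 * walk was started with atomic == false.
 */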
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

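/*
 * Decryption mirrors ccm_encrypt(), except that req->cryptlen covers both
 * the ciphertext and the appended tag, and the computed MAC is compared
 * against the stored one with crypto_memneq() to avoid leaking the position
 * of the first mismatching byte through timing.
 */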
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

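/*
 * CCM is a stream-like construction on top of CTR, so cra_blocksize is 1;
 * the underlying 16-byte AES granularity is advertised via .chunksize.
 */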
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

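/*
 * Register the transform only when the CPU's hwcaps report the ARMv8 AES
 * instructions; otherwise bail out with -ENODEV so a software fallback
 * can be used instead.
 */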
static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
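
/*
 * Usage sketch (illustrative, not part of this driver): a kernel caller
 * reaches this implementation through the generic AEAD API, where the
 * priority of 300 makes it preferred over the generic ccm(aes) template:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *		crypto_aead_setauthsize(tfm, 8);
 *		...
 *		crypto_free_aead(tfm);
 *	}
 */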