aes-neonbs-glue.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <[email protected]>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <[email protected]>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
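
/* low level routines provided by the accompanying NEON assembler code */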
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int bytes, u8 ctr[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
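
/*
 * Context layout: the bit sliced key schedule (->rk) feeds the 8-way
 * parallel paths; modes that also need the standard key schedule (the
 * CBC/CTR fallback, the XTS tweak and ciphertext stealing tail) keep
 * additional copies of the expanded key alongside it.
 */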
struct aesbs_ctx {
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
	int	rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctr_ctx {
	struct aesbs_ctx	key;
	u32			enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	u32			twkey[AES_MAX_KEYLENGTH_U32];
	struct crypto_aes_ctx	cts;
};
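
/*
 * Expand the user key with the generic AES library code, then convert the
 * encryption round keys into the bit sliced representation expected by the
 * NEON routines.
 */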
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();

	return 0;
}
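
/*
 * Walk the request and hand full blocks to the bit sliced routine; all but
 * the final step is rounded down to a multiple of the 8-block stride the
 * bit sliced code operates on.
 */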
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}
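
/*
 * CBC and CTR also keep the standard expanded encryption key: CBC
 * encryption and the CTR tail use the plain NEON routines, which take the
 * regular key schedule rather than the bit sliced one.
 */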
static int aesbs_cbc_ctr_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;
	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();
	memzero_explicit(&rk, sizeof(rk));

	return 0;
}
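
/*
 * CBC encryption is inherently sequential (each block depends on the
 * previous ciphertext block), so it cannot be done 8 blocks at a time and
 * uses the non-bitsliced NEON implementation throughout.
 */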
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* fall back to the non-bitsliced NEON implementation */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
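
/*
 * CBC decryption has no such dependency chain, so full 8-block strides can
 * be handed to the bit sliced decryption routine.
 */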
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
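
/*
 * CTR mode: groups of 8 full blocks go through the bit sliced routine; any
 * remainder in the final step (including a partial block) is handled by the
 * plain NEON CTR code using the standard key schedule.
 */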
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		kernel_neon_begin();
		if (blocks >= 8) {
			aesbs_ctr_encrypt(dst, src, ctx->key.rk, ctx->key.rounds,
					  blocks, walk.iv);
			dst += blocks * AES_BLOCK_SIZE;
			src += blocks * AES_BLOCK_SIZE;
		}
		if (nbytes && walk.nbytes == walk.total) {
			neon_aes_ctr_encrypt(dst, src, ctx->enc, ctx->key.rounds,
					     nbytes, walk.iv);
			nbytes = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}
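
/*
 * XTS takes two AES keys: the first half of the key material is expanded
 * both into the bit sliced schedule and into ->cts (for the ciphertext
 * stealing tail); the second half becomes the tweak key in ->twkey.
 */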
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = aes_expandkey(&ctx->cts, in_key, key_len);
	if (err)
		return err;

	err = aes_expandkey(&rk, in_key + key_len, key_len);
	if (err)
		return err;

	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

	return aesbs_setkey(tfm, in_key, key_len);
}
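
/*
 * XTS: the first tweak is derived by encrypting the IV with the tweak key
 * before the first 8-block pass. Full 8-block groups use the bit sliced
 * routines; the remainder, and the ciphertext stealing tail when the length
 * is not a multiple of the block size, go through the plain NEON XTS
 * helpers.
 */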
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	int nbytes, err;
	int first = 1;
	u8 *out, *in;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	/* ensure that the cts tail is covered by a single step */
	if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
	} else {
		tail = 0;
	}

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		out = walk.dst.virt.addr;
		in = walk.src.virt.addr;
		nbytes = walk.nbytes;

		kernel_neon_begin();
		if (blocks >= 8) {
			if (first == 1)
				neon_aes_ecb_encrypt(walk.iv, walk.iv,
						     ctx->twkey,
						     ctx->key.rounds, 1);
			first = 2;

			fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
			   walk.iv);

			out += blocks * AES_BLOCK_SIZE;
			in += blocks * AES_BLOCK_SIZE;
			nbytes -= blocks * AES_BLOCK_SIZE;
		}
		if (walk.nbytes == walk.total && nbytes > 0) {
			if (encrypt)
				neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			else
				neon_aes_xts_decrypt(out, in, ctx->cts.key_dec,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			nbytes = first = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* handle ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	out = walk.dst.virt.addr;
	in = walk.src.virt.addr;
	nbytes = walk.nbytes;

	kernel_neon_begin();
	if (encrypt)
		neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	else
		neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, false, aesbs_xts_decrypt);
}
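
/*
 * All four algorithms register with a cra_priority of 250: higher than the
 * generic C implementation, so this module wins on NEON-only cores, while
 * drivers based on the ARMv8 Crypto Extensions (where present) typically
 * register with a higher priority and take precedence. A caller can also
 * select this implementation explicitly by driver name, e.g. (illustrative
 * only):
 *
 *	tfm = crypto_alloc_skcipher("ctr-aes-neonbs", 0, 0);
 *
 * whereas requesting the generic "ctr(aes)" leaves the choice to the
 * priority values.
 */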
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static void aes_exit(void)
{
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
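
/*
 * The bit sliced code relies on Advanced SIMD (NEON), so refuse to load on
 * CPUs that do not advertise the ASIMD feature.
 */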
static int __init aes_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);