// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <[email protected]>
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha2.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE "ce"
#define PRIO 300
#define aes_expandkey ce_aes_expandkey
#define aes_ecb_encrypt ce_aes_ecb_encrypt
#define aes_ecb_decrypt ce_aes_ecb_decrypt
#define aes_cbc_encrypt ce_aes_cbc_encrypt
#define aes_cbc_decrypt ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt ce_aes_ctr_encrypt
#define aes_xctr_encrypt ce_aes_xctr_encrypt
#define aes_xts_encrypt ce_aes_xts_encrypt
#define aes_xts_decrypt ce_aes_xts_decrypt
#define aes_mac_update ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 Crypto Extensions");
#else
#define MODE "neon"
#define PRIO 200
#define aes_ecb_encrypt neon_aes_ecb_encrypt
#define aes_ecb_decrypt neon_aes_ecb_decrypt
#define aes_cbc_encrypt neon_aes_cbc_encrypt
#define aes_cbc_decrypt neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt neon_aes_ctr_encrypt
#define aes_xctr_encrypt neon_aes_xctr_encrypt
#define aes_xts_encrypt neon_aes_xts_encrypt
#define aes_xts_decrypt neon_aes_xts_decrypt
#define aes_mac_update neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 NEON");
#endif

#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("xctr(aes)");
#endif
MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");

MODULE_AUTHOR("Ard Biesheuvel <[email protected]>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
                                int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
                                int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
                                    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                int rounds, int bytes, u8 ctr[]);

asmlinkage void aes_xctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                 int rounds, int bytes, u8 ctr[], int byte_ctr);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
                                int rounds, int bytes, u32 const rk2[], u8 iv[],
                                int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
                                int rounds, int bytes, u32 const rk2[], u8 iv[],
                                int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
                                      int rounds, int blocks, u8 iv[],
                                      u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
                                      int rounds, int blocks, u8 iv[],
                                      u32 const rk2[]);

asmlinkage int aes_mac_update(u8 const in[], u32 const rk[], int rounds,
                              int blocks, u8 dg[], int enc_before,
                              int enc_after);

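/*
 * Context layouts for the combined modes: the XTS context carries a second
 * schedule for the tweak key, the ESSIV context carries the schedule of the
 * SHA-256 derived IV key plus the shash used to compute it, and the MAC
 * contexts append their derived constants after the key schedule.
 */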
struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
        struct crypto_shash *hash;
};

struct mac_tfm_ctx {
        struct crypto_aes_ctx key;
        u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
        unsigned int len;
        u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                               unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

        return aes_expandkey(ctx, in_key, key_len);
}

static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
                                      const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int ret;

        ret = xts_verify_key(tfm, in_key, key_len);
        if (ret)
                return ret;

        ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                    key_len / 2);
        return ret;
}

static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
                                            const u8 *in_key,
                                            unsigned int key_len)
{
        struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        u8 digest[SHA256_DIGEST_SIZE];
        int ret;

        ret = aes_expandkey(&ctx->key1, in_key, key_len);
        if (ret)
                return ret;

        crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);

        return aes_expandkey(&ctx->key2, digest, sizeof(digest));
}

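/*
 * All of the block mode handlers below follow the same pattern: map the
 * scatterlists with skcipher_walk_virt(), process as many whole blocks as
 * the current chunk holds between kernel_neon_begin() and kernel_neon_end(),
 * and let skcipher_walk_done() carry any remainder over into the next
 * iteration.
 */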
static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                ctx->key_enc, rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                ctx->key_dec, rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

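/*
 * The CBC helpers operate on an already started walk so that the CTS and
 * ESSIV entry points below can reuse them to finish off requests they have
 * begun themselves.
 */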
static int cbc_encrypt_walk(struct skcipher_request *req,
                            struct skcipher_walk *walk)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err = 0, rounds = 6 + ctx->key_length / 4;
        unsigned int blocks;

        while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
                                ctx->key_enc, rounds, blocks, walk->iv);
                kernel_neon_end();
                err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;
        return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
                            struct skcipher_walk *walk)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err = 0, rounds = 6 + ctx->key_length / 4;
        unsigned int blocks;

        while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
                                ctx->key_dec, rounds, blocks, walk->iv);
                kernel_neon_end();
                err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;
        return cbc_decrypt_walk(req, &walk);
}

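/*
 * CBC with ciphertext stealing: all but the last two blocks are handled by
 * the plain CBC helpers via a subrequest, and the remaining full block plus
 * partial block are handed to the aes_cbc_cts_* routines. A request of
 * exactly one block degenerates to plain CBC, and anything shorter is
 * rejected.
 */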
static int cts_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
        struct scatterlist *src = req->src, *dst = req->dst;
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct skcipher_walk walk;

        skcipher_request_set_tfm(&subreq, tfm);
        skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
                                      NULL, NULL);

        if (req->cryptlen <= AES_BLOCK_SIZE) {
                if (req->cryptlen < AES_BLOCK_SIZE)
                        return -EINVAL;
                cbc_blocks = 1;
        }

        if (cbc_blocks > 0) {
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           cbc_blocks * AES_BLOCK_SIZE,
                                           req->iv);

                err = skcipher_walk_virt(&walk, &subreq, false) ?:
                      cbc_encrypt_walk(&subreq, &walk);
                if (err)
                        return err;

                if (req->cryptlen == AES_BLOCK_SIZE)
                        return 0;

                dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
                if (req->dst != req->src)
                        dst = scatterwalk_ffwd(sg_dst, req->dst,
                                               subreq.cryptlen);
        }

        /* handle ciphertext stealing */
        skcipher_request_set_crypt(&subreq, src, dst,
                                   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
                                   req->iv);

        err = skcipher_walk_virt(&walk, &subreq, false);
        if (err)
                return err;

        kernel_neon_begin();
        aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                            ctx->key_enc, rounds, walk.nbytes, walk.iv);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
        struct scatterlist *src = req->src, *dst = req->dst;
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct skcipher_walk walk;

        skcipher_request_set_tfm(&subreq, tfm);
        skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
                                      NULL, NULL);

        if (req->cryptlen <= AES_BLOCK_SIZE) {
                if (req->cryptlen < AES_BLOCK_SIZE)
                        return -EINVAL;
                cbc_blocks = 1;
        }

        if (cbc_blocks > 0) {
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           cbc_blocks * AES_BLOCK_SIZE,
                                           req->iv);

                err = skcipher_walk_virt(&walk, &subreq, false) ?:
                      cbc_decrypt_walk(&subreq, &walk);
                if (err)
                        return err;

                if (req->cryptlen == AES_BLOCK_SIZE)
                        return 0;

                dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
                if (req->dst != req->src)
                        dst = scatterwalk_ffwd(sg_dst, req->dst,
                                               subreq.cryptlen);
        }

        /* handle ciphertext stealing */
        skcipher_request_set_crypt(&subreq, src, dst,
                                   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
                                   req->iv);

        err = skcipher_walk_virt(&walk, &subreq, false);
        if (err)
                return err;

        kernel_neon_begin();
        aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                            ctx->key_dec, rounds, walk.nbytes, walk.iv);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

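/*
 * ESSIV: the IV is encrypted with a second key derived from the SHA-256
 * digest of the cipher key before the first CBC block is processed. Only the
 * first walk chunk needs the ESSIV variant of the asm code; any remaining
 * chunks are ordinary CBC continuations and are finished by
 * cbc_encrypt_walk()/cbc_decrypt_walk().
 */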
static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->hash = crypto_alloc_shash("sha256", 0, 0);

        return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_shash(ctx->hash);
}

static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        blocks = walk.nbytes / AES_BLOCK_SIZE;
        if (blocks) {
                kernel_neon_begin();
                aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                      ctx->key1.key_enc, rounds, blocks,
                                      req->iv, ctx->key2.key_enc);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err ?: cbc_encrypt_walk(req, &walk);
}

static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        blocks = walk.nbytes / AES_BLOCK_SIZE;
        if (blocks) {
                kernel_neon_begin();
                aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                      ctx->key1.key_dec, rounds, blocks,
                                      req->iv, ctx->key2.key_enc);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err ?: cbc_decrypt_walk(req, &walk);
}

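/*
 * XCTR is the CTR variant used by HCTR2: instead of updating the counter
 * block in place, the asm code derives each block's counter value from the
 * IV and a running byte offset (byte_ctr), which is carried across walk
 * iterations here. A trailing partial block is bounced through a 16-byte
 * stack buffer, as described in the comment inside the loop.
 */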
static int __maybe_unused xctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int byte_ctr = 0;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                const u8 *src = walk.src.virt.addr;
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                u8 buf[AES_BLOCK_SIZE];

                /*
                 * If given less than 16 bytes, we must copy the partial block
                 * into a temporary buffer of 16 bytes to avoid out of bounds
                 * reads and writes. Furthermore, this code is somewhat unusual
                 * in that it expects the end of the data to be at the end of
                 * the temporary buffer, rather than the start of the data at
                 * the start of the temporary buffer.
                 */
                if (unlikely(nbytes < AES_BLOCK_SIZE))
                        src = dst = memcpy(buf + sizeof(buf) - nbytes,
                                           src, nbytes);
                else if (nbytes < walk.total)
                        nbytes &= ~(AES_BLOCK_SIZE - 1);

                kernel_neon_begin();
                aes_xctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes,
                                 walk.iv, byte_ctr);
                kernel_neon_end();

                if (unlikely(nbytes < AES_BLOCK_SIZE))
                        memcpy(walk.dst.virt.addr,
                               buf + sizeof(buf) - nbytes, nbytes);
                byte_ctr += nbytes;

                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        return err;
}

static int __maybe_unused ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                const u8 *src = walk.src.virt.addr;
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                u8 buf[AES_BLOCK_SIZE];

                /*
                 * If given less than 16 bytes, we must copy the partial block
                 * into a temporary buffer of 16 bytes to avoid out of bounds
                 * reads and writes. Furthermore, this code is somewhat unusual
                 * in that it expects the end of the data to be at the end of
                 * the temporary buffer, rather than the start of the data at
                 * the start of the temporary buffer.
                 */
                if (unlikely(nbytes < AES_BLOCK_SIZE))
                        src = dst = memcpy(buf + sizeof(buf) - nbytes,
                                           src, nbytes);
                else if (nbytes < walk.total)
                        nbytes &= ~(AES_BLOCK_SIZE - 1);

                kernel_neon_begin();
                aes_ctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes,
                                walk.iv);
                kernel_neon_end();

                if (unlikely(nbytes < AES_BLOCK_SIZE))
                        memcpy(walk.dst.virt.addr,
                               buf + sizeof(buf) - nbytes, nbytes);

                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        return err;
}

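/*
 * XTS: the asm code handles a trailing partial block (ciphertext stealing)
 * by itself when it is given the full remaining length. The C code only
 * needs to split the request when the walk cannot present the last two
 * blocks in one chunk; in that case the bulk is processed first via a
 * subrequest and the tail (one full block plus the partial block) is handled
 * in a second pass.
 */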
static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct scatterlist *src, *dst;
        struct skcipher_walk walk;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        err = skcipher_walk_virt(&walk, req, false);

        if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
                int xts_blocks = DIV_ROUND_UP(req->cryptlen,
                                              AES_BLOCK_SIZE) - 2;

                skcipher_walk_abort(&walk);

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           xts_blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
                err = skcipher_walk_virt(&walk, req, false);
        } else {
                tail = 0;
        }

        for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
                int nbytes = walk.nbytes;

                if (walk.nbytes < walk.total)
                        nbytes &= ~(AES_BLOCK_SIZE - 1);

                kernel_neon_begin();
                aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                ctx->key1.key_enc, rounds, nbytes,
                                ctx->key2.key_enc, walk.iv, first);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        if (err || likely(!tail))
                return err;

        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, &subreq, false);
        if (err)
                return err;

        kernel_neon_begin();
        aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                        ctx->key1.key_enc, rounds, walk.nbytes,
                        ctx->key2.key_enc, walk.iv, first);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct scatterlist *src, *dst;
        struct skcipher_walk walk;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        err = skcipher_walk_virt(&walk, req, false);

        if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
                int xts_blocks = DIV_ROUND_UP(req->cryptlen,
                                              AES_BLOCK_SIZE) - 2;

                skcipher_walk_abort(&walk);

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           xts_blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
                err = skcipher_walk_virt(&walk, req, false);
        } else {
                tail = 0;
        }

        for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
                int nbytes = walk.nbytes;

                if (walk.nbytes < walk.total)
                        nbytes &= ~(AES_BLOCK_SIZE - 1);

                kernel_neon_begin();
                aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                ctx->key1.key_dec, rounds, nbytes,
                                ctx->key2.key_enc, walk.iv, first);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        if (err || likely(!tail))
                return err;

        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, &subreq, false);
        if (err)
                return err;

        kernel_neon_begin();
        aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                        ctx->key1.key_dec, rounds, walk.nbytes,
                        ctx->key2.key_enc, walk.iv, first);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

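/*
 * The ECB/CBC/CTR/XCTR/XTS entries are only provided by this driver when it
 * is built with the Crypto Extensions, or when the bit-sliced NEON
 * implementation (CONFIG_CRYPTO_AES_ARM64_BS) is not enabled. PRIO is 300
 * for the "ce" build and 200 for the "neon" build; the ESSIV entry registers
 * at PRIO + 1.
 */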
static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
        .base = {
                .cra_name = "ecb(aes)",
                .cra_driver_name = "ecb-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct crypto_aes_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .setkey = skcipher_aes_setkey,
        .encrypt = ecb_encrypt,
        .decrypt = ecb_decrypt,
}, {
        .base = {
                .cra_name = "cbc(aes)",
                .cra_driver_name = "cbc-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct crypto_aes_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = skcipher_aes_setkey,
        .encrypt = cbc_encrypt,
        .decrypt = cbc_decrypt,
}, {
        .base = {
                .cra_name = "ctr(aes)",
                .cra_driver_name = "ctr-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct crypto_aes_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .chunksize = AES_BLOCK_SIZE,
        .setkey = skcipher_aes_setkey,
        .encrypt = ctr_encrypt,
        .decrypt = ctr_encrypt,
}, {
        .base = {
                .cra_name = "xctr(aes)",
                .cra_driver_name = "xctr-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct crypto_aes_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .chunksize = AES_BLOCK_SIZE,
        .setkey = skcipher_aes_setkey,
        .encrypt = xctr_encrypt,
        .decrypt = xctr_encrypt,
}, {
        .base = {
                .cra_name = "xts(aes)",
                .cra_driver_name = "xts-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct crypto_aes_xts_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = 2 * AES_MIN_KEY_SIZE,
        .max_keysize = 2 * AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .walksize = 2 * AES_BLOCK_SIZE,
        .setkey = xts_set_key,
        .encrypt = xts_encrypt,
        .decrypt = xts_decrypt,
}, {
#endif
        .base = {
                .cra_name = "cts(cbc(aes))",
                .cra_driver_name = "cts-cbc-aes-" MODE,
                .cra_priority = PRIO,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct crypto_aes_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .walksize = 2 * AES_BLOCK_SIZE,
        .setkey = skcipher_aes_setkey,
        .encrypt = cts_cbc_encrypt,
        .decrypt = cts_cbc_decrypt,
}, {
        .base = {
                .cra_name = "essiv(cbc(aes),sha256)",
                .cra_driver_name = "essiv-cbc-aes-sha256-" MODE,
                .cra_priority = PRIO + 1,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct crypto_aes_essiv_cbc_ctx),
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = essiv_cbc_set_key,
        .encrypt = essiv_cbc_encrypt,
        .decrypt = essiv_cbc_decrypt,
        .init = essiv_cbc_init_tfm,
        .exit = essiv_cbc_exit_tfm,
} };

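/*
 * CMAC, XCBC and CBC-MAC share mac_init(), mac_update() and mac_do_update();
 * they differ only in how the key and the constants in mac_tfm_ctx::consts
 * are derived and in how the final block is handled.
 */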
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                         unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

        return aes_expandkey(&ctx->key, in_key, key_len);
}

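/*
 * Doubling in GF(2^128) with the reduction polynomial
 * x^128 + x^7 + x^2 + x + 1 (constant 0x87), used to derive the two CMAC
 * subkeys from the encrypted all-zero block.
 */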
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
        u64 a = be64_to_cpu(x->a);
        u64 b = be64_to_cpu(x->b);

        y->a = cpu_to_be64((a << 1) | (b >> 63));
        y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        be128 *consts = (be128 *)ctx->consts;
        int rounds = 6 + key_len / 4;
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        /* encrypt the zero vector */
        kernel_neon_begin();
        aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
                        rounds, 1);
        kernel_neon_end();

        cmac_gf128_mul_by_x(consts, consts);
        cmac_gf128_mul_by_x(consts + 1, consts);

        return 0;
}

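/*
 * XCBC derives three values by encrypting the constants 0x01.., 0x02.. and
 * 0x03.. with the user key: the first replaces the key used for the CBC-MAC
 * itself, while the other two are stored in ctx->consts for the final block.
 */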
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        static u8 const ks[3][AES_BLOCK_SIZE] = {
                { [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
        };

        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        int rounds = 6 + key_len / 4;
        u8 key[AES_BLOCK_SIZE];
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        kernel_neon_begin();
        aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
        aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
        kernel_neon_end();

        return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        memset(ctx->dg, 0, AES_BLOCK_SIZE);
        ctx->len = 0;

        return 0;
}

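/*
 * Feed full blocks into the running digest. When SIMD is usable, the NEON
 * routine is used; it may return a count of blocks it did not process, so
 * keep calling it until none remain. Otherwise fall back to the scalar AES
 * library. enc_before/enc_after control whether the digest is encrypted
 * before the first and after the last block.
 */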
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
                          u8 dg[], int enc_before, int enc_after)
{
        int rounds = 6 + ctx->key_length / 4;

        if (crypto_simd_usable()) {
                int rem;

                do {
                        kernel_neon_begin();
                        rem = aes_mac_update(in, ctx->key_enc, rounds, blocks,
                                             dg, enc_before, enc_after);
                        kernel_neon_end();
                        in += (blocks - rem) * AES_BLOCK_SIZE;
                        blocks = rem;
                        enc_before = 0;
                } while (blocks);
        } else {
                if (enc_before)
                        aes_encrypt(ctx, dg, dg);

                while (blocks--) {
                        crypto_xor(dg, in, AES_BLOCK_SIZE);
                        in += AES_BLOCK_SIZE;

                        if (blocks || enc_after)
                                aes_encrypt(ctx, dg, dg);
                }
        }
}

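/*
 * Partial input is XORed into the pending block in ctx->dg; full blocks go
 * through mac_do_update(). ctx->len is left at AES_BLOCK_SIZE when the data
 * ends exactly on a block boundary, so that cmac_final() can tell a complete
 * final block from one that needs padding.
 */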
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        while (len > 0) {
                unsigned int l;

                if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
                    (ctx->len + len) > AES_BLOCK_SIZE) {

                        int blocks = len / AES_BLOCK_SIZE;

                        len %= AES_BLOCK_SIZE;

                        mac_do_update(&tctx->key, p, blocks, ctx->dg,
                                      (ctx->len != 0), (len != 0));

                        p += blocks * AES_BLOCK_SIZE;

                        if (!len) {
                                ctx->len = AES_BLOCK_SIZE;
                                break;
                        }
                        ctx->len = 0;
                }

                l = min(len, AES_BLOCK_SIZE - ctx->len);

                if (l <= AES_BLOCK_SIZE) {
                        crypto_xor(ctx->dg + ctx->len, p, l);
                        ctx->len += l;
                        len -= l;
                        p += l;
                }
        }

        return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

static int cmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
        u8 *consts = tctx->consts;

        if (ctx->len != AES_BLOCK_SIZE) {
                ctx->dg[ctx->len] ^= 0x80;
                consts += AES_BLOCK_SIZE;
        }

        mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

static struct shash_alg mac_algs[] = { {
        .base.cra_name = "cmac(aes)",
        .base.cra_driver_name = "cmac-aes-" MODE,
        .base.cra_priority = PRIO,
        .base.cra_blocksize = AES_BLOCK_SIZE,
        .base.cra_ctxsize = sizeof(struct mac_tfm_ctx) +
                            2 * AES_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,

        .digestsize = AES_BLOCK_SIZE,
        .init = mac_init,
        .update = mac_update,
        .final = cmac_final,
        .setkey = cmac_setkey,
        .descsize = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name = "xcbc(aes)",
        .base.cra_driver_name = "xcbc-aes-" MODE,
        .base.cra_priority = PRIO,
        .base.cra_blocksize = AES_BLOCK_SIZE,
        .base.cra_ctxsize = sizeof(struct mac_tfm_ctx) +
                            2 * AES_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,

        .digestsize = AES_BLOCK_SIZE,
        .init = mac_init,
        .update = mac_update,
        .final = cmac_final,
        .setkey = xcbc_setkey,
        .descsize = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name = "cbcmac(aes)",
        .base.cra_driver_name = "cbcmac-aes-" MODE,
        .base.cra_priority = PRIO,
        .base.cra_blocksize = 1,
        .base.cra_ctxsize = sizeof(struct mac_tfm_ctx),
        .base.cra_module = THIS_MODULE,

        .digestsize = AES_BLOCK_SIZE,
        .init = mac_init,
        .update = mac_update,
        .final = cbcmac_final,
        .setkey = cbcmac_setkey,
        .descsize = sizeof(struct mac_desc_ctx),
} };

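/*
 * Register the skcipher and shash algorithms; if shash registration fails,
 * the skciphers registered just before are unregistered again. The "ce"
 * build is bound to the AES CPU feature and only loads on CPUs that
 * implement the Crypto Extensions, while the "neon" build loads
 * unconditionally and exports its neon_aes_* helpers for use by the
 * bit-sliced NEON driver.
 */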
static void aes_exit(void)
{
        crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
        int err;

        err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        if (err)
                return err;

        err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        if (err)
                goto unregister_ciphers;

        return 0;

unregister_ciphers:
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
EXPORT_SYMBOL(neon_aes_ctr_encrypt);
EXPORT_SYMBOL(neon_aes_xts_encrypt);
EXPORT_SYMBOL(neon_aes_xts_decrypt);
#endif
module_exit(aes_exit);