aes_glue.c 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475
  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /* Glue code for AES encryption optimized for sparc64 crypto opcodes.
  3. *
  4. * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
  5. *
  6. * Copyright (C) 2008, Intel Corp.
  7. * Author: Huang Ying <[email protected]>
  8. *
  9. * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
  10. * interface for 64-bit kernels.
  11. * Authors: Adrian Hoban <[email protected]>
  12. * Gabriele Paoloni <[email protected]>
  13. * Tadeusz Struk ([email protected])
  14. * Aidan O'Mahony ([email protected])
  15. * Copyright (c) 2010, Intel Corporation.
  16. */
  17. #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
  18. #include <linux/crypto.h>
  19. #include <linux/init.h>
  20. #include <linux/module.h>
  21. #include <linux/mm.h>
  22. #include <linux/types.h>
  23. #include <crypto/algapi.h>
  24. #include <crypto/aes.h>
  25. #include <crypto/internal/skcipher.h>
  26. #include <asm/fpumacro.h>
  27. #include <asm/pstate.h>
  28. #include <asm/elf.h>
  29. #include "opcodes.h"
/*
 * Dispatch table of key-size-specific sparc64 assembly routines.
 *
 * The bulk helpers (ecb/cbc/ctr) take a byte length that the glue code
 * below always rounds down to a multiple of AES_BLOCK_SIZE.  The glue
 * code calls load_encrypt_keys/load_decrypt_keys once before a run of
 * bulk operations and clears FPRS afterwards (see fprs_write(0) in the
 * skcipher handlers), so the key schedule presumably lives in FPU
 * registers across the loop — confirm against the .S implementation.
 */
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};
/* Per-transform context: expanded key schedule plus the dispatch table. */
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;		/* key-size specific asm routines */
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];	/* expanded key schedule */
	u32 key_length;			/* user key length in bytes (16/24/32) */
	u32 expanded_key_length;	/* expanded schedule length in bytes */
};
/*
 * Prototypes for the hand-written sparc64 crypto-opcode routines,
 * implemented in the accompanying assembly file; one variant per key size.
 */
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);
extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
/* AES-128 routine table, selected by aes_set_key() for 16-byte keys. */
static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};
/* AES-192 routine table, selected by aes_set_key() for 24-byte keys. */
static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};
/* AES-256 routine table, selected by aes_set_key() for 32-byte keys. */
static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};
/* Asm key-schedule expansion: writes the expanded schedule into output_key. */
extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);
  144. static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
  145. unsigned int key_len)
  146. {
  147. struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
  148. switch (key_len) {
  149. case AES_KEYSIZE_128:
  150. ctx->expanded_key_length = 0xb0;
  151. ctx->ops = &aes128_ops;
  152. break;
  153. case AES_KEYSIZE_192:
  154. ctx->expanded_key_length = 0xd0;
  155. ctx->ops = &aes192_ops;
  156. break;
  157. case AES_KEYSIZE_256:
  158. ctx->expanded_key_length = 0xf0;
  159. ctx->ops = &aes256_ops;
  160. break;
  161. default:
  162. return -EINVAL;
  163. }
  164. aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
  165. ctx->key_length = key_len;
  166. return 0;
  167. }
  168. static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
  169. unsigned int key_len)
  170. {
  171. return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
  172. }
  173. static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  174. {
  175. struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
  176. ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
  177. }
  178. static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  179. {
  180. struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
  181. ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
  182. }
  183. static int ecb_encrypt(struct skcipher_request *req)
  184. {
  185. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  186. const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
  187. struct skcipher_walk walk;
  188. unsigned int nbytes;
  189. int err;
  190. err = skcipher_walk_virt(&walk, req, true);
  191. if (err)
  192. return err;
  193. ctx->ops->load_encrypt_keys(&ctx->key[0]);
  194. while ((nbytes = walk.nbytes) != 0) {
  195. ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
  196. walk.dst.virt.addr,
  197. round_down(nbytes, AES_BLOCK_SIZE));
  198. err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
  199. }
  200. fprs_write(0);
  201. return err;
  202. }
  203. static int ecb_decrypt(struct skcipher_request *req)
  204. {
  205. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  206. const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
  207. const u64 *key_end;
  208. struct skcipher_walk walk;
  209. unsigned int nbytes;
  210. int err;
  211. err = skcipher_walk_virt(&walk, req, true);
  212. if (err)
  213. return err;
  214. ctx->ops->load_decrypt_keys(&ctx->key[0]);
  215. key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
  216. while ((nbytes = walk.nbytes) != 0) {
  217. ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
  218. walk.dst.virt.addr,
  219. round_down(nbytes, AES_BLOCK_SIZE));
  220. err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
  221. }
  222. fprs_write(0);
  223. return err;
  224. }
  225. static int cbc_encrypt(struct skcipher_request *req)
  226. {
  227. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  228. const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
  229. struct skcipher_walk walk;
  230. unsigned int nbytes;
  231. int err;
  232. err = skcipher_walk_virt(&walk, req, true);
  233. if (err)
  234. return err;
  235. ctx->ops->load_encrypt_keys(&ctx->key[0]);
  236. while ((nbytes = walk.nbytes) != 0) {
  237. ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
  238. walk.dst.virt.addr,
  239. round_down(nbytes, AES_BLOCK_SIZE),
  240. walk.iv);
  241. err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
  242. }
  243. fprs_write(0);
  244. return err;
  245. }
  246. static int cbc_decrypt(struct skcipher_request *req)
  247. {
  248. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  249. const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
  250. const u64 *key_end;
  251. struct skcipher_walk walk;
  252. unsigned int nbytes;
  253. int err;
  254. err = skcipher_walk_virt(&walk, req, true);
  255. if (err)
  256. return err;
  257. ctx->ops->load_decrypt_keys(&ctx->key[0]);
  258. key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
  259. while ((nbytes = walk.nbytes) != 0) {
  260. ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
  261. walk.dst.virt.addr,
  262. round_down(nbytes, AES_BLOCK_SIZE),
  263. walk.iv);
  264. err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
  265. }
  266. fprs_write(0);
  267. return err;
  268. }
  269. static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
  270. struct skcipher_walk *walk)
  271. {
  272. u8 *ctrblk = walk->iv;
  273. u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
  274. u8 *src = walk->src.virt.addr;
  275. u8 *dst = walk->dst.virt.addr;
  276. unsigned int nbytes = walk->nbytes;
  277. ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
  278. keystream, AES_BLOCK_SIZE);
  279. crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
  280. crypto_inc(ctrblk, AES_BLOCK_SIZE);
  281. }
  282. static int ctr_crypt(struct skcipher_request *req)
  283. {
  284. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  285. const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
  286. struct skcipher_walk walk;
  287. unsigned int nbytes;
  288. int err;
  289. err = skcipher_walk_virt(&walk, req, true);
  290. if (err)
  291. return err;
  292. ctx->ops->load_encrypt_keys(&ctx->key[0]);
  293. while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
  294. ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
  295. walk.dst.virt.addr,
  296. round_down(nbytes, AES_BLOCK_SIZE),
  297. walk.iv);
  298. err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
  299. }
  300. if (walk.nbytes) {
  301. ctr_crypt_final(ctx, &walk);
  302. err = skcipher_walk_done(&walk, 0);
  303. }
  304. fprs_write(0);
  305. return err;
  306. }
/* Single-block "aes" cipher registration (legacy cipher interface). */
static struct crypto_alg cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,	/* u32 alignment for src/dst casts */
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
};
/*
 * skcipher registrations for ecb(aes), cbc(aes) and ctr(aes).
 * alignmask 7 matches the u64-typed src/dst used by the asm routines.
 * ctr(aes) is a stream mode: blocksize 1, chunksize AES_BLOCK_SIZE,
 * and the same handler serves both encrypt and decrypt.
 */
static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
		.chunksize		= AES_BLOCK_SIZE,
	}
};
  371. static bool __init sparc64_has_aes_opcode(void)
  372. {
  373. unsigned long cfr;
  374. if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
  375. return false;
  376. __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
  377. if (!(cfr & CFR_AES))
  378. return false;
  379. return true;
  380. }
  381. static int __init aes_sparc64_mod_init(void)
  382. {
  383. int err;
  384. if (!sparc64_has_aes_opcode()) {
  385. pr_info("sparc64 aes opcodes not available.\n");
  386. return -ENODEV;
  387. }
  388. pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
  389. err = crypto_register_alg(&cipher_alg);
  390. if (err)
  391. return err;
  392. err = crypto_register_skciphers(skcipher_algs,
  393. ARRAY_SIZE(skcipher_algs));
  394. if (err)
  395. crypto_unregister_alg(&cipher_alg);
  396. return err;
  397. }
  398. static void __exit aes_sparc64_mod_fini(void)
  399. {
  400. crypto_unregister_alg(&cipher_alg);
  401. crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
  402. }
  403. module_init(aes_sparc64_mod_init);
  404. module_exit(aes_sparc64_mod_fini);
  405. MODULE_LICENSE("GPL");
  406. MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");
  407. MODULE_ALIAS_CRYPTO("aes");
  408. #include "crop_devid.c"