  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
  3. *
  4. * Copyright (C) 2012 David S. Miller <[email protected]>
  5. */
  6. #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
  7. #include <linux/crypto.h>
  8. #include <linux/init.h>
  9. #include <linux/module.h>
  10. #include <linux/mm.h>
  11. #include <linux/types.h>
  12. #include <crypto/algapi.h>
  13. #include <crypto/internal/skcipher.h>
  14. #include <asm/fpumacro.h>
  15. #include <asm/pstate.h>
  16. #include <asm/elf.h>
  17. #include "opcodes.h"
  18. #define CAMELLIA_MIN_KEY_SIZE 16
  19. #define CAMELLIA_MAX_KEY_SIZE 32
  20. #define CAMELLIA_BLOCK_SIZE 16
  21. #define CAMELLIA_TABLE_BYTE_LEN 272
/*
 * Per-transform context: the key schedules for both directions,
 * pre-expanded by camellia_sparc64_key_expand() at setkey time.
 * Each table is CAMELLIA_TABLE_BYTE_LEN (272) bytes, stored as u64
 * words as expected by the sparc64 opcode helpers.
 */
struct camellia_sparc64_ctx {
	u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	int key_len;	/* original key length in bytes: 16, 24 or 32 */
};
  27. extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
  28. unsigned int key_len, u64 *decrypt_key);
  29. static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
  30. unsigned int key_len)
  31. {
  32. struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
  33. const u32 *in_key = (const u32 *) _in_key;
  34. if (key_len != 16 && key_len != 24 && key_len != 32)
  35. return -EINVAL;
  36. ctx->key_len = key_len;
  37. camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
  38. key_len, &ctx->decrypt_key[0]);
  39. return 0;
  40. }
  41. static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
  42. const u8 *in_key, unsigned int key_len)
  43. {
  44. return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
  45. }
  46. extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
  47. u32 *output, unsigned int key_len);
  48. static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  49. {
  50. struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
  51. camellia_sparc64_crypt(&ctx->encrypt_key[0],
  52. (const u32 *) src,
  53. (u32 *) dst, ctx->key_len);
  54. }
  55. static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  56. {
  57. struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
  58. camellia_sparc64_crypt(&ctx->decrypt_key[0],
  59. (const u32 *) src,
  60. (u32 *) dst, ctx->key_len);
  61. }
  62. extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);
  63. typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
  64. const u64 *key);
  65. extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
  66. extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;
  67. static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
  68. {
  69. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  70. const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
  71. struct skcipher_walk walk;
  72. ecb_crypt_op *op;
  73. const u64 *key;
  74. unsigned int nbytes;
  75. int err;
  76. op = camellia_sparc64_ecb_crypt_3_grand_rounds;
  77. if (ctx->key_len != 16)
  78. op = camellia_sparc64_ecb_crypt_4_grand_rounds;
  79. err = skcipher_walk_virt(&walk, req, true);
  80. if (err)
  81. return err;
  82. if (encrypt)
  83. key = &ctx->encrypt_key[0];
  84. else
  85. key = &ctx->decrypt_key[0];
  86. camellia_sparc64_load_keys(key, ctx->key_len);
  87. while ((nbytes = walk.nbytes) != 0) {
  88. op(walk.src.virt.addr, walk.dst.virt.addr,
  89. round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
  90. err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
  91. }
  92. fprs_write(0);
  93. return err;
  94. }
  95. static int ecb_encrypt(struct skcipher_request *req)
  96. {
  97. return __ecb_crypt(req, true);
  98. }
  99. static int ecb_decrypt(struct skcipher_request *req)
  100. {
  101. return __ecb_crypt(req, false);
  102. }
  103. typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
  104. const u64 *key, u64 *iv);
  105. extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
  106. extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
  107. extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
  108. extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;
  109. static int cbc_encrypt(struct skcipher_request *req)
  110. {
  111. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  112. const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
  113. struct skcipher_walk walk;
  114. cbc_crypt_op *op;
  115. const u64 *key;
  116. unsigned int nbytes;
  117. int err;
  118. op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
  119. if (ctx->key_len != 16)
  120. op = camellia_sparc64_cbc_encrypt_4_grand_rounds;
  121. err = skcipher_walk_virt(&walk, req, true);
  122. if (err)
  123. return err;
  124. key = &ctx->encrypt_key[0];
  125. camellia_sparc64_load_keys(key, ctx->key_len);
  126. while ((nbytes = walk.nbytes) != 0) {
  127. op(walk.src.virt.addr, walk.dst.virt.addr,
  128. round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
  129. err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
  130. }
  131. fprs_write(0);
  132. return err;
  133. }
  134. static int cbc_decrypt(struct skcipher_request *req)
  135. {
  136. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  137. const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
  138. struct skcipher_walk walk;
  139. cbc_crypt_op *op;
  140. const u64 *key;
  141. unsigned int nbytes;
  142. int err;
  143. op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
  144. if (ctx->key_len != 16)
  145. op = camellia_sparc64_cbc_decrypt_4_grand_rounds;
  146. err = skcipher_walk_virt(&walk, req, true);
  147. if (err)
  148. return err;
  149. key = &ctx->decrypt_key[0];
  150. camellia_sparc64_load_keys(key, ctx->key_len);
  151. while ((nbytes = walk.nbytes) != 0) {
  152. op(walk.src.virt.addr, walk.dst.virt.addr,
  153. round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
  154. err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
  155. }
  156. fprs_write(0);
  157. return err;
  158. }
/*
 * Single-block cipher ("camellia") registration.  alignmask 3 because
 * the single-block path accesses key and data as u32 words (see the
 * casts in camellia_encrypt()/camellia_decrypt()).
 */
static struct crypto_alg cipher_alg = {
	.cra_name		= "camellia",
	.cra_driver_name	= "camellia-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.cia_max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.cia_setkey		= camellia_set_key,
			.cia_encrypt		= camellia_encrypt,
			.cia_decrypt		= camellia_decrypt
		}
	}
};
/*
 * Accelerated ECB and CBC skcipher modes.  alignmask 7 because the
 * bulk asm routines take u64 pointers (see ecb_crypt_op/cbc_crypt_op),
 * so buffers must be 8-byte aligned.
 */
static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(camellia)",
		.base.cra_driver_name	= "ecb-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(camellia)",
		.base.cra_driver_name	= "cbc-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,	/* CBC needs a block-sized IV */
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}
};
  208. static bool __init sparc64_has_camellia_opcode(void)
  209. {
  210. unsigned long cfr;
  211. if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
  212. return false;
  213. __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
  214. if (!(cfr & CFR_CAMELLIA))
  215. return false;
  216. return true;
  217. }
  218. static int __init camellia_sparc64_mod_init(void)
  219. {
  220. int err;
  221. if (!sparc64_has_camellia_opcode()) {
  222. pr_info("sparc64 camellia opcodes not available.\n");
  223. return -ENODEV;
  224. }
  225. pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
  226. err = crypto_register_alg(&cipher_alg);
  227. if (err)
  228. return err;
  229. err = crypto_register_skciphers(skcipher_algs,
  230. ARRAY_SIZE(skcipher_algs));
  231. if (err)
  232. crypto_unregister_alg(&cipher_alg);
  233. return err;
  234. }
/* Module exit: unregister everything init registered. */
static void __exit camellia_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}
  240. module_init(camellia_sparc64_mod_init);
  241. module_exit(camellia_sparc64_mod_fini);
  242. MODULE_LICENSE("GPL");
  243. MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");
  244. MODULE_ALIAS_CRYPTO("camellia");
  245. #include "crop_devid.c"