// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
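/*
 * Rough outline of the construction as implemented below (hctr2_crypt() and
 * hctr2_finish() are authoritative): the message is split into a first
 * 16-byte block M and a bulk part N, and encryption under tweak T computes
 *
 *        MM = M ^ H(T || N)
 *        UU = E(MM)
 *        S  = MM ^ UU ^ L
 *        V  = XCTR(S, N)
 *        U  = UU ^ H(T || V)
 *
 * where H is POLYVAL (keyed as in hctr2_setkey()), E is the block cipher, and
 * L is a key-derived block.  The ciphertext is U || V; decryption follows the
 * same structure with E replaced by its inverse.
 */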
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct hctr2_instance_ctx {
        struct crypto_cipher_spawn blockcipher_spawn;
        struct crypto_skcipher_spawn xctr_spawn;
        struct crypto_shash_spawn polyval_spawn;
};

struct hctr2_tfm_ctx {
        struct crypto_cipher *blockcipher;
        struct crypto_skcipher *xctr;
        struct crypto_shash *polyval;
        u8 L[BLOCKCIPHER_BLOCK_SIZE];
        int hashed_tweak_offset;
        /*
         * This struct is allocated with extra space for two exported hash
         * states.  Since the hash state size is not known at compile-time, we
         * can't add these to the struct directly.
         *
         * hashed_tweaklen_divisible;
         * hashed_tweaklen_remainder;
         */
};

struct hctr2_request_ctx {
        u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
        u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
        struct scatterlist *bulk_part_dst;
        struct scatterlist *bulk_part_src;
        struct scatterlist sg_src[2];
        struct scatterlist sg_dst[2];
        /*
         * Sub-request sizes are unknown at compile-time, so they need to go
         * after the members with known sizes.
         */
        union {
                struct shash_desc hash_desc;
                struct skcipher_request xctr_req;
        } u;
        /*
         * This struct is allocated with extra space for one exported hash
         * state.  Since the hash state size is not known at compile-time, we
         * can't add it to the struct directly.
         *
         * hashed_tweak;
         */
};
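/*
 * Rough layout of the variable-length tails referenced by the helpers below
 * (the offsets are only known at runtime, once the POLYVAL state size and
 * sub-request sizes are known; see hctr2_init_tfm()):
 *
 *        tfm ctx:     struct hctr2_tfm_ctx, then hashed_tweaklen_divisible,
 *                     then hashed_tweaklen_remainder
 *        request ctx: the fixed members above, then the union u, then
 *                     hashed_tweak
 */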
static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
                                        bool has_remainder)
{
        u8 *p = (u8 *)tctx + sizeof(*tctx);

        if (has_remainder) /* For messages not a multiple of block length */
                p += crypto_shash_statesize(tctx->polyval);
        return p;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
                                     struct hctr2_request_ctx *rctx)
{
        return (u8 *)rctx + tctx->hashed_tweak_offset;
}
/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
        SHASH_DESC_ON_STACK(shash, tctx->polyval);
        __le64 tweak_length_block[2];
        int err;

        shash->tfm = tctx->polyval;
        memset(tweak_length_block, 0, sizeof(tweak_length_block));
        tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
        err = crypto_shash_init(shash);
        if (err)
                return err;
        err = crypto_shash_update(shash, (u8 *)tweak_length_block,
                                  POLYVAL_BLOCK_SIZE);
        if (err)
                return err;
        return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}
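/*
 * For illustration: with TWEAK_SIZE == 32, the length block hashed above
 * holds the 64-bit little-endian value 32 * 8 * 2 + 2 + has_remainder, i.e.
 * 514 when the message length is a multiple of the block size and 515
 * otherwise; the remaining 8 bytes of the 16-byte block stay zero.
 */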
static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
        int err;

        crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(tctx->blockcipher,
                                crypto_skcipher_get_flags(tfm) &
                                CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
        if (err)
                return err;

        crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(tctx->xctr,
                                  crypto_skcipher_get_flags(tfm) &
                                  CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
        if (err)
                return err;

        /* Derive the POLYVAL hash key: the encryption of the all-zeroes block */
        memset(hbar, 0, sizeof(hbar));
        crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

        /* Derive L: the encryption of the block whose first byte is 0x01 */
        memset(tctx->L, 0, sizeof(tctx->L));
        tctx->L[0] = 0x01;
        crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

        crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
        crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
                               CRYPTO_TFM_REQ_MASK);
        err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
        if (err)
                return err;
        memzero_explicit(hbar, sizeof(hbar));

        return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}
static int hctr2_hash_tweak(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
        struct shash_desc *hash_desc = &rctx->u.hash_desc;
        int err;
        bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

        hash_desc->tfm = tctx->polyval;
        err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
        if (err)
                return err;
        err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
        if (err)
                return err;

        // Store the hashed tweak, since we need it when computing both
        // H(T || N) and H(T || V).
        return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}
static int hctr2_hash_message(struct skcipher_request *req,
                              struct scatterlist *sgl,
                              u8 digest[POLYVAL_DIGEST_SIZE])
{
        static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
        struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
        struct shash_desc *hash_desc = &rctx->u.hash_desc;
        const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
        struct sg_mapping_iter miter;
        unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
        int i;
        int err = 0;
        int n = 0;

        sg_miter_start(&miter, sgl, sg_nents(sgl),
                       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
        for (i = 0; i < bulk_len; i += n) {
                sg_miter_next(&miter);
                n = min_t(unsigned int, miter.length, bulk_len - i);
                err = crypto_shash_update(hash_desc, miter.addr, n);
                if (err)
                        break;
        }
        sg_miter_stop(&miter);

        if (err)
                return err;

        if (remainder) {
                err = crypto_shash_update(hash_desc, padding,
                                          BLOCKCIPHER_BLOCK_SIZE - remainder);
                if (err)
                        return err;
        }
        return crypto_shash_final(hash_desc, digest);
}
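/*
 * Worked example for the padding in hctr2_hash_message(): for a 100-byte
 * request, bulk_len is 84 and remainder is 4, so the final partial block is
 * completed with 12 bytes of "padding" (0x01 followed by eleven 0x00 bytes)
 * before the digest is finalized.  This pairs with the has_remainder flag
 * folded into the precomputed length block in hctr2_hash_tweaklen().
 */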
static int hctr2_finish(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
        u8 digest[POLYVAL_DIGEST_SIZE];
        struct shash_desc *hash_desc = &rctx->u.hash_desc;
        int err;

        // U = UU ^ H(T || V)
        // or M = MM ^ H(T || N)
        hash_desc->tfm = tctx->polyval;
        err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
        if (err)
                return err;
        err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
        if (err)
                return err;
        crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

        // Copy U (or M) into dst scatterlist
        scatterwalk_map_and_copy(rctx->first_block, req->dst,
                                 0, BLOCKCIPHER_BLOCK_SIZE, 1);
        return 0;
}

static void hctr2_xctr_done(struct crypto_async_request *areq,
                            int err)
{
        struct skcipher_request *req = areq->data;

        if (!err)
                err = hctr2_finish(req);

        skcipher_request_complete(req, err);
}
static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
        u8 digest[POLYVAL_DIGEST_SIZE];
        int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
        int err;

        // Requests must be at least one block
        if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
                return -EINVAL;

        // Copy M (or U) into a temporary buffer
        scatterwalk_map_and_copy(rctx->first_block, req->src,
                                 0, BLOCKCIPHER_BLOCK_SIZE, 0);

        // Create scatterlists for N and V
        rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
                                               BLOCKCIPHER_BLOCK_SIZE);
        rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
                                               BLOCKCIPHER_BLOCK_SIZE);

        // MM = M ^ H(T || N)
        // or UU = U ^ H(T || V)
        err = hctr2_hash_tweak(req);
        if (err)
                return err;
        err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
        if (err)
                return err;
        crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

        // UU = E(MM)
        // or MM = D(UU)
        if (enc)
                crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
                                          digest);
        else
                crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
                                          digest);

        // S = MM ^ UU ^ L
        crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
        crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

        // V = XCTR(S, N)
        // or N = XCTR(S, V)
        skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
        skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
                                   rctx->bulk_part_dst, bulk_len,
                                   rctx->xctr_iv);
        skcipher_request_set_callback(&rctx->u.xctr_req,
                                      req->base.flags,
                                      hctr2_xctr_done, req);
        return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
                hctr2_finish(req);
}
static int hctr2_encrypt(struct skcipher_request *req)
{
        return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
        return hctr2_crypt(req, false);
}

static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
        struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *xctr;
        struct crypto_cipher *blockcipher;
        struct crypto_shash *polyval;
        unsigned int subreq_size;
        int err;

        xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
        if (IS_ERR(xctr))
                return PTR_ERR(xctr);

        blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
        if (IS_ERR(blockcipher)) {
                err = PTR_ERR(blockcipher);
                goto err_free_xctr;
        }

        polyval = crypto_spawn_shash(&ictx->polyval_spawn);
        if (IS_ERR(polyval)) {
                err = PTR_ERR(polyval);
                goto err_free_blockcipher;
        }

        tctx->xctr = xctr;
        tctx->blockcipher = blockcipher;
        tctx->polyval = polyval;

        BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
                     sizeof(struct hctr2_request_ctx));
        subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
                          crypto_shash_descsize(polyval),
                          sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
                          crypto_skcipher_reqsize(xctr));

        tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
                                    subreq_size;
        crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
                                    crypto_shash_statesize(polyval));
        return 0;

err_free_blockcipher:
        crypto_free_cipher(blockcipher);
err_free_xctr:
        crypto_free_skcipher(xctr);
        return err;
}
static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
        struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        crypto_free_cipher(tctx->blockcipher);
        crypto_free_skcipher(tctx->xctr);
        crypto_free_shash(tctx->polyval);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
        struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

        crypto_drop_cipher(&ictx->blockcipher_spawn);
        crypto_drop_skcipher(&ictx->xctr_spawn);
        crypto_drop_shash(&ictx->polyval_spawn);
        kfree(inst);
}
static int hctr2_create_common(struct crypto_template *tmpl,
                               struct rtattr **tb,
                               const char *xctr_name,
                               const char *polyval_name)
{
        u32 mask;
        struct skcipher_instance *inst;
        struct hctr2_instance_ctx *ictx;
        struct skcipher_alg *xctr_alg;
        struct crypto_alg *blockcipher_alg;
        struct shash_alg *polyval_alg;
        char blockcipher_name[CRYPTO_MAX_ALG_NAME];
        int len;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
        if (err)
                return err;

        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;
        ictx = skcipher_instance_ctx(inst);

        /* Stream cipher, xctr(block_cipher) */
        err = crypto_grab_skcipher(&ictx->xctr_spawn,
                                   skcipher_crypto_instance(inst),
                                   xctr_name, 0, mask);
        if (err)
                goto err_free_inst;
        xctr_alg = crypto_spawn_skcipher_alg(&ictx->xctr_spawn);

        err = -EINVAL;
        if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
                goto err_free_inst;
        len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
                      sizeof(blockcipher_name));
        if (len < 1)
                goto err_free_inst;
        if (blockcipher_name[len - 1] != ')')
                goto err_free_inst;
        blockcipher_name[len - 1] = 0;

        /* Block cipher, e.g. "aes" */
        err = crypto_grab_cipher(&ictx->blockcipher_spawn,
                                 skcipher_crypto_instance(inst),
                                 blockcipher_name, 0, mask);
        if (err)
                goto err_free_inst;
        blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

        /* Require blocksize of 16 bytes */
        err = -EINVAL;
        if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
                goto err_free_inst;

        /* Polyval ε-∆U hash function */
        err = crypto_grab_shash(&ictx->polyval_spawn,
                                skcipher_crypto_instance(inst),
                                polyval_name, 0, mask);
        if (err)
                goto err_free_inst;
        polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

        /* Ensure Polyval is being used */
        err = -EINVAL;
        if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
                goto err_free_inst;

        /* Instance fields */
        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
                     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_free_inst;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "hctr2_base(%s,%s)",
                     xctr_alg->base.cra_driver_name,
                     polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_free_inst;

        inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
        inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
                                     polyval_alg->statesize * 2;
        inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask |
                                       polyval_alg->base.cra_alignmask;
        /*
         * The hash function is called twice, so it is weighted higher than the
         * xctr and blockcipher.
         */
        inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
                                       4 * polyval_alg->base.cra_priority +
                                       blockcipher_alg->cra_priority) / 7;

        inst->alg.setkey = hctr2_setkey;
        inst->alg.encrypt = hctr2_encrypt;
        inst->alg.decrypt = hctr2_decrypt;
        inst->alg.init = hctr2_init_tfm;
        inst->alg.exit = hctr2_exit_tfm;
        inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(xctr_alg);
        inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(xctr_alg);
        inst->alg.ivsize = TWEAK_SIZE;

        inst->free = hctr2_free_instance;

        err = skcipher_register_instance(tmpl, inst);
        if (err) {
err_free_inst:
                hctr2_free_instance(inst);
        }
        return err;
}
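/*
 * Naming example (driver names are illustrative and platform-dependent):
 * wrapping "xctr(aes)" and "polyval" produces an instance with cra_name
 * "hctr2(aes)" and a cra_driver_name of the form
 * "hctr2_base(<xctr driver>,<polyval driver>)".
 */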
static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
        const char *xctr_name;
        const char *polyval_name;

        xctr_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(xctr_name))
                return PTR_ERR(xctr_name);

        polyval_name = crypto_attr_alg_name(tb[2]);
        if (IS_ERR(polyval_name))
                return PTR_ERR(polyval_name);

        return hctr2_create_common(tmpl, tb, xctr_name, polyval_name);
}

static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        const char *blockcipher_name;
        char xctr_name[CRYPTO_MAX_ALG_NAME];

        blockcipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(blockcipher_name))
                return PTR_ERR(blockcipher_name);

        if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
                     blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return hctr2_create_common(tmpl, tb, xctr_name, "polyval");
}
static struct crypto_template hctr2_tmpls[] = {
        {
                /* hctr2_base(xctr_name, polyval_name) */
                .name = "hctr2_base",
                .create = hctr2_create_base,
                .module = THIS_MODULE,
        }, {
                /* hctr2(blockcipher_name) */
                .name = "hctr2",
                .create = hctr2_create,
                .module = THIS_MODULE,
        }
};
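/*
 * Minimal usage sketch (assumptions: a caller elsewhere in the kernel, with
 * "key"/"keylen"/"err" defined; not part of this file).  An instance of the
 * template above is allocated by name and driven through the regular
 * skcipher API:
 *
 *        struct crypto_skcipher *tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *        err = crypto_skcipher_setkey(tfm, key, keylen);
 *        // ...then set up a struct skcipher_request with a TWEAK_SIZE
 *        // (32-byte) IV and at least BLOCKCIPHER_BLOCK_SIZE bytes of data,
 *        // and call crypto_skcipher_encrypt() or crypto_skcipher_decrypt().
 *
 * The "hctr2_base" form instead takes explicit xctr and polyval
 * implementation names.
 */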
static int __init hctr2_module_init(void)
{
        return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

static void __exit hctr2_module_exit(void)
{
        return crypto_unregister_templates(hctr2_tmpls,
                                           ARRAY_SIZE(hctr2_tmpls));
}

subsys_initcall(hctr2_module_init);
module_exit(hctr2_module_exit);

MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);