// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/crypto.h>
#include <linux/rtnetlink.h>

#include <crypto/aead.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

#define GCM_AES_SALT_SIZE	4
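
/*
 * General purpose header word 3: packs the IV offset and the auth data
 * offset into a single 16-bit parameter.  The bitfield order flips with
 * CPU endianness so that the value serialized by cpu_to_be16() lays out
 * the same either way.
 */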
union gph_p3 {
        struct {
#ifdef __BIG_ENDIAN_BITFIELD
                u16 iv_offset : 8;
                u16 auth_offset : 8;
#else
                u16 auth_offset : 8;
                u16 iv_offset : 8;
#endif
        };
        u16 param;
};
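
/*
 * Program the AES key into the flexi crypto context shared with the
 * SE.  The context flags are stored big-endian, so they are pulled
 * into CPU order, updated and written back.
 */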
static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key,
                                 unsigned int keylen)
{
        int aes_keylen;
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct flexi_crypto_context *fctx;
        union fc_ctx_flags flags;

        aes_keylen = flexi_aes_keylen(keylen);
        if (aes_keylen < 0)
                return -EINVAL;

        /* fill crypto context */
        fctx = nctx->u.fctx;
        flags.fu = be64_to_cpu(fctx->flags.f);
        flags.w0.aes_keylen = aes_keylen;
        fctx->flags.f = cpu_to_be64(flags.fu);

        /* copy enc key to context */
        memset(&fctx->crypto, 0, sizeof(fctx->crypto));
        memcpy(fctx->crypto.u.key, key, keylen);

        return 0;
}

static int nitrox_aead_setauthsize(struct crypto_aead *aead,
                                   unsigned int authsize)
{
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct flexi_crypto_context *fctx = nctx->u.fctx;
        union fc_ctx_flags flags;

        flags.fu = be64_to_cpu(fctx->flags.f);
        flags.w0.mac_len = authsize;
        fctx->flags.f = cpu_to_be64(flags.fu);

        aead->authsize = authsize;

        return 0;
}

static int nitrox_aes_gcm_setauthsize(struct crypto_aead *aead,
                                      unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12:
        case 13:
        case 14:
        case 15:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return nitrox_aead_setauthsize(aead, authsize);
}
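
/*
 * Build the input side of the request.  One extra scatterlist entry is
 * reserved ahead of the caller's source buffers so the IV can be sent
 * inline with the data.
 */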
static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq,
                            struct scatterlist *src, char *iv, int ivsize,
                            int buflen)
{
        int nents = sg_nents_for_len(src, buflen);
        int ret;

        if (nents < 0)
                return nents;

        /* IV entry */
        nents += 1;
        /* Allocate buffer to hold IV and input scatterlist array */
        ret = alloc_src_req_buf(nkreq, nents, ivsize);
        if (ret)
                return ret;

        nitrox_creq_copy_iv(nkreq->src, iv, ivsize);
        nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);

        return 0;
}

static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
                            struct scatterlist *dst, int ivsize, int buflen)
{
        int nents = sg_nents_for_len(dst, buflen);
        int ret;

        if (nents < 0)
                return nents;

        /* IV, ORH, COMPLETION entries */
        nents += 3;
        /* Allocate buffer to hold ORH, COMPLETION and output scatterlist
         * array
         */
        ret = alloc_dst_req_buf(nkreq, nents);
        if (ret)
                return ret;

        nitrox_creq_set_orh(nkreq);
        nitrox_creq_set_comp(nkreq);
        nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

        return 0;
}

static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
        kfree(nkreq->src);
}

static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
        kfree(nkreq->dst);
}
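
/*
 * Translate the AEAD request context into an SE crypto request.
 * param0..param3 of the general purpose header carry the cipher
 * length, the total authenticated length, the combined IV+AAD length
 * and the IV/auth offsets respectively.
 */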
static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
        struct se_crypto_request *creq = &rctx->nkreq.creq;
        union gph_p3 param3;
        int ret;

        creq->flags = rctx->flags;
        creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
                                                               GFP_ATOMIC;

        creq->ctrl.value = 0;
        creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
        creq->ctrl.s.arg = rctx->ctrl_arg;

        creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
        creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
        creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
        param3.iv_offset = 0;
        param3.auth_offset = rctx->ivsize;
        creq->gph.param3 = cpu_to_be16(param3.param);

        creq->ctx_handle = rctx->ctx_handle;
        creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

        ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
                               rctx->srclen);
        if (ret)
                return ret;

        ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
                               rctx->dstlen);
        if (ret) {
                free_src_sglist(&rctx->nkreq);
                return ret;
        }

        return 0;
}

static void nitrox_aead_callback(void *arg, int err)
{
        struct aead_request *areq = arg;
        struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);

        free_src_sglist(&rctx->nkreq);
        free_dst_sglist(&rctx->nkreq);
        if (err) {
                pr_err_ratelimited("request failed status 0x%0x\n", err);
                err = -EINVAL;
        }

        areq->base.complete(&areq->base, err);
}
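
/*
 * Requests carrying more than 512 bytes of associated data are
 * rejected before being queued; larger AAD is not handled here.
 */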
static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
        if (assoclen <= 512)
                return true;

        return false;
}

static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(areq);
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
        struct se_crypto_request *creq = &rctx->nkreq.creq;
        struct flexi_crypto_context *fctx = nctx->u.fctx;
        int ret;

        if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
                return -EINVAL;

        memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

        rctx->cryptlen = areq->cryptlen;
        rctx->assoclen = areq->assoclen;
        rctx->srclen = areq->assoclen + areq->cryptlen;
        rctx->dstlen = rctx->srclen + aead->authsize;
        rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
        rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
        rctx->flags = areq->base.flags;
        rctx->ctx_handle = nctx->u.ctx_handle;
        rctx->src = areq->src;
        rctx->dst = areq->dst;
        rctx->ctrl_arg = ENCRYPT;
        ret = nitrox_set_creq(rctx);
        if (ret)
                return ret;

        /* send the crypto request */
        return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
                                         areq);
}

static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(areq);
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
        struct se_crypto_request *creq = &rctx->nkreq.creq;
        struct flexi_crypto_context *fctx = nctx->u.fctx;
        int ret;

        if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
                return -EINVAL;

        memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

        rctx->cryptlen = areq->cryptlen - aead->authsize;
        rctx->assoclen = areq->assoclen;
        rctx->srclen = areq->cryptlen + areq->assoclen;
        rctx->dstlen = rctx->srclen - aead->authsize;
        rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
        rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
        rctx->flags = areq->base.flags;
        rctx->ctx_handle = nctx->u.ctx_handle;
        rctx->src = areq->src;
        rctx->dst = areq->dst;
        rctx->ctrl_arg = DECRYPT;
        ret = nitrox_set_creq(rctx);
        if (ret)
                return ret;

        /* send the crypto request */
        return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
                                         areq);
}
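
/*
 * Per-transform setup: take a reference on the first NITROX device and
 * allocate a context from it.  The handle handed to the engine points
 * just past the context header, at the flexi crypto context itself.
 */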
static int nitrox_aead_init(struct crypto_aead *aead)
{
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct crypto_ctx_hdr *chdr;

        /* get the first device */
        nctx->ndev = nitrox_get_first_device();
        if (!nctx->ndev)
                return -ENODEV;

        /* allocate nitrox crypto context */
        chdr = crypto_alloc_context(nctx->ndev);
        if (!chdr) {
                nitrox_put_device(nctx->ndev);
                return -ENOMEM;
        }

        nctx->chdr = chdr;
        nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
                                         sizeof(struct ctx_hdr));
        nctx->u.fctx->flags.f = 0;

        return 0;
}

static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
        int ret;
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        union fc_ctx_flags *flags;

        ret = nitrox_aead_init(aead);
        if (ret)
                return ret;

        flags = &nctx->u.fctx->flags;
        flags->w0.cipher_type = CIPHER_AES_GCM;
        flags->w0.hash_type = AUTH_NULL;
        flags->w0.iv_source = IV_FROM_DPTR;

        /* ask microcode to calculate ipad/opad */
        flags->w0.auth_input_type = 1;
        flags->f = cpu_to_be64(flags->fu);

        return 0;
}

static int nitrox_aes_gcm_init(struct crypto_aead *aead)
{
        int ret;

        ret = nitrox_gcm_common_init(aead);
        if (ret)
                return ret;

        crypto_aead_set_reqsize(aead,
                                sizeof(struct aead_request) +
                                sizeof(struct nitrox_aead_rctx));

        return 0;
}

static void nitrox_aead_exit(struct crypto_aead *aead)
{
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

        /* free the nitrox crypto context */
        if (nctx->u.ctx_handle) {
                struct flexi_crypto_context *fctx = nctx->u.fctx;

                memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
                memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
                crypto_free_context((void *)nctx->chdr);
        }
        nitrox_put_device(nctx->ndev);

        nctx->u.ctx_handle = 0;
        nctx->ndev = NULL;
}
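
/*
 * An RFC 4106 key is the AES key followed by a 4-byte salt.  Split the
 * salt off, program the cipher key as for plain GCM, and keep the salt
 * as the fixed part of the nonce in the context IV.
 */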
static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key,
                                 unsigned int keylen)
{
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct flexi_crypto_context *fctx = nctx->u.fctx;
        int ret;

        if (keylen < GCM_AES_SALT_SIZE)
                return -EINVAL;

        keylen -= GCM_AES_SALT_SIZE;
        ret = nitrox_aes_gcm_setkey(aead, key, keylen);
        if (ret)
                return ret;

        memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);

        return 0;
}

static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead,
                                      unsigned int authsize)
{
        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return nitrox_aead_setauthsize(aead, authsize);
}
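
/*
 * RFC 4106 places the 8-byte IV at the tail of the associated data,
 * while the engine expects the AAD without it.  Copy the real AAD into
 * a bounce buffer and chain it in front of the data portion of the
 * source (and, for out-of-place requests, destination) scatterlists.
 */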
static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
        struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
        struct nitrox_aead_rctx *aead_rctx = &rctx->base;
        unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
        struct scatterlist *sg;

        if (areq->assoclen != 16 && areq->assoclen != 20)
                return -EINVAL;

        scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
        sg_init_table(rctx->src, 3);
        sg_set_buf(rctx->src, rctx->assoc, assoclen);
        sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
        if (sg != rctx->src + 1)
                sg_chain(rctx->src, 2, sg);

        if (areq->src != areq->dst) {
                sg_init_table(rctx->dst, 3);
                sg_set_buf(rctx->dst, rctx->assoc, assoclen);
                sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
                if (sg != rctx->dst + 1)
                        sg_chain(rctx->dst, 2, sg);
        }

        aead_rctx->src = rctx->src;
        aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

        return 0;
}

static void nitrox_rfc4106_callback(void *arg, int err)
{
        struct aead_request *areq = arg;
        struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
        struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;

        free_src_sglist(nkreq);
        free_dst_sglist(nkreq);
        if (err) {
                pr_err_ratelimited("request failed status 0x%0x\n", err);
                err = -EINVAL;
        }

        areq->base.complete(&areq->base, err);
}
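
/*
 * For the rfc4106 entry points areq->assoclen still includes the
 * 8-byte sequence IV, so the AAD and source lengths below subtract it
 * and the IV travels separately via aead_rctx->iv.
 */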
static int nitrox_rfc4106_enc(struct aead_request *areq)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(areq);
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
        struct nitrox_aead_rctx *aead_rctx = &rctx->base;
        struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
        int ret;

        aead_rctx->cryptlen = areq->cryptlen;
        aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
        aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
        aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;
        aead_rctx->iv = areq->iv;
        aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
        aead_rctx->flags = areq->base.flags;
        aead_rctx->ctx_handle = nctx->u.ctx_handle;
        aead_rctx->ctrl_arg = ENCRYPT;

        ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
        if (ret)
                return ret;

        ret = nitrox_set_creq(aead_rctx);
        if (ret)
                return ret;

        /* send the crypto request */
        return nitrox_process_se_request(nctx->ndev, creq,
                                         nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_dec(struct aead_request *areq)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(areq);
        struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
        struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
        struct nitrox_aead_rctx *aead_rctx = &rctx->base;
        struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
        int ret;

        aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
        aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
        aead_rctx->srclen =
                areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
        aead_rctx->dstlen = aead_rctx->srclen - aead->authsize;
        aead_rctx->iv = areq->iv;
        aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
        aead_rctx->flags = areq->base.flags;
        aead_rctx->ctx_handle = nctx->u.ctx_handle;
        aead_rctx->ctrl_arg = DECRYPT;

        ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
        if (ret)
                return ret;

        ret = nitrox_set_creq(aead_rctx);
        if (ret)
                return ret;

        /* send the crypto request */
        return nitrox_process_se_request(nctx->ndev, creq,
                                         nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_init(struct crypto_aead *aead)
{
        int ret;

        ret = nitrox_gcm_common_init(aead);
        if (ret)
                return ret;

        crypto_aead_set_reqsize(aead, sizeof(struct aead_request) +
                                sizeof(struct nitrox_rfc4106_rctx));

        return 0;
}
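
/*
 * Exported algorithms: plain gcm(aes) and its rfc4106 wrapper.  Both
 * are async and allocate memory on the request path, hence the
 * CRYPTO_ALG_ALLOCATES_MEMORY flag.
 */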
static struct aead_alg nitrox_aeads[] = { {
        .base = {
                .cra_name = "gcm(aes)",
                .cra_driver_name = "n5_aes_gcm",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .setkey = nitrox_aes_gcm_setkey,
        .setauthsize = nitrox_aes_gcm_setauthsize,
        .encrypt = nitrox_aes_gcm_enc,
        .decrypt = nitrox_aes_gcm_dec,
        .init = nitrox_aes_gcm_init,
        .exit = nitrox_aead_exit,
        .ivsize = GCM_AES_IV_SIZE,
        .maxauthsize = AES_BLOCK_SIZE,
}, {
        .base = {
                .cra_name = "rfc4106(gcm(aes))",
                .cra_driver_name = "n5_rfc4106",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .setkey = nitrox_rfc4106_setkey,
        .setauthsize = nitrox_rfc4106_setauthsize,
        .encrypt = nitrox_rfc4106_enc,
        .decrypt = nitrox_rfc4106_dec,
        .init = nitrox_rfc4106_init,
        .exit = nitrox_aead_exit,
        .ivsize = GCM_RFC4106_IV_SIZE,
        .maxauthsize = AES_BLOCK_SIZE,
} };

int nitrox_register_aeads(void)
{
        return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}

void nitrox_unregister_aeads(void)
{
        crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}