// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <[email protected]>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */
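
/*
 * The algorithms registered by this driver are used through the generic
 * kernel skcipher API. A minimal usage sketch (illustrative only, not part
 * of this driver; error handling omitted, "cbc(aes)", the key and the
 * src/dst scatterlists are assumptions made for the example):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, 16, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */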
#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"
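
/*
 * sun8i_ss_need_fallback() - decide whether a request must be handled by
 * the software fallback instead of the SS hardware.
 *
 * The hardware cannot process empty or non-block-aligned requests, more
 * than 8 SG entries per direction, SG entries whose length is not a
 * multiple of 16 or whose offset is not 16-byte aligned, or src/dst
 * scatterlists whose entry lengths do not match pairwise.
 */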
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;
	unsigned int todo, len;

	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		algt->stat_fb_len++;
		return true;
	}

	if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
		algt->stat_fb_sgnum++;
		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	/* The SS needs the same number of SG entries (with the same lengths) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}
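
/*
 * sun8i_ss_cipher_fallback() - run the request on the software fallback
 * skcipher, reusing the flags, callback and buffers of the original request
 * and honouring its direction (op_dir).
 */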
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}
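
/*
 * sun8i_ss_setup_ivs() - prepare and DMA-map the IVs used by CBC requests.
 *
 * For decryption the last ciphertext block of the source is first saved in
 * sf->biv so it can be returned as the next IV after the source may have
 * been overwritten. One IV per source SG entry is then copied into sf->iv[]
 * and mapped towards the device; for encryption only the first IV is needed.
 */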
static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sg = areq->src;
	unsigned int todo, offset;
	unsigned int len = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	int i = 0;
	dma_addr_t a;
	int err;

	rctx->ivlen = ivsize;
	if (rctx->op_dir & SS_DECRYPTION) {
		offset = areq->cryptlen - ivsize;
		scatterwalk_map_and_copy(sf->biv, areq->src, offset,
					 ivsize, 0);
	}

	/* we need to copy all IVs from the source in case DMA is bidirectional */
	while (sg && len) {
		if (sg_dma_len(sg) == 0) {
			sg = sg_next(sg);
			continue;
		}
		if (i == 0)
			memcpy(sf->iv[0], areq->iv, ivsize);
		a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, a)) {
			memzero_explicit(sf->iv[i], ivsize);
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -EFAULT;
			goto dma_iv_error;
		}
		rctx->p_iv[i] = a;
		/* the other IVs only need to be set up when decrypting */
		if (rctx->op_dir == SS_ENCRYPTION)
			return 0;
		todo = min(len, sg_dma_len(sg));
		len -= todo;
		i++;
		if (i < MAX_SG) {
			offset = sg->length - ivsize;
			scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);
		}
		rctx->niv = i;
		sg = sg_next(sg);
	}

	return 0;
dma_iv_error:
	i--;
	while (i >= 0) {
		dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
		memzero_explicit(sf->iv[i], ivsize);
		i--;
	}
	return err;
}
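
/*
 * sun8i_ss_cipher() - program one skcipher request into the SS hardware.
 *
 * Maps the key, the IVs and the source/destination scatterlists for DMA,
 * fills the per-request t_src[]/t_dst[] descriptors (lengths are given in
 * 32-bit words), runs the task, then unmaps everything in reverse order and
 * copies the output IV back into areq->iv.
 */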
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
	int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		err = sun8i_ss_setup_ivs(areq);
		if (err)
			goto theend_key;
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		for (i = 0; i < rctx->niv; i++) {
			dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
			memzero_explicit(sf->iv[i], ivsize);
		}

		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & SS_DECRYPTION) {
			memcpy(areq->iv, sf->biv, ivsize);
			memzero_explicit(sf->biv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}
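
/* Called by crypto_engine to process one queued skcipher request. */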
static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}
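
/*
 * sun8i_ss_skdecrypt() - skcipher decrypt entry point: use the fallback
 * when needed, otherwise pick a flow and queue the request on its engine.
 */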
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
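
/*
 * sun8i_ss_skencrypt() - skcipher encrypt entry point: use the fallback
 * when needed, otherwise pick a flow and queue the request on its engine.
 */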
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
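
/*
 * sun8i_ss_cipher_init() - tfm init: allocate the fallback skcipher, size
 * the request context accordingly, set the crypto_engine callbacks and take
 * a runtime PM reference on the device.
 */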
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	memcpy(algt->fbname,
	       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),
	       CRYPTO_MAX_ALG_NAME);

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}
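
/* tfm exit: free the key material and fallback tfm, drop the PM reference. */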
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}
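
/*
 * sun8i_ss_aes_setkey() - accept 128/192/256-bit AES keys, keep a DMA-able
 * copy of the key and forward it to the fallback tfm as well.
 */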
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
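
/*
 * sun8i_ss_des3_setkey() - accept only 192-bit (3 * DES_KEY_SIZE) keys for
 * DES3, keep a DMA-able copy and forward the key to the fallback tfm.
 */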
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}