stm32-cryp.c

  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Copyright (C) STMicroelectronics SA 2017
  4. * Author: Fabien Dessenne <[email protected]>
  5. */
  6. #include <linux/clk.h>
  7. #include <linux/delay.h>
  8. #include <linux/interrupt.h>
  9. #include <linux/iopoll.h>
  10. #include <linux/module.h>
  11. #include <linux/of_device.h>
  12. #include <linux/platform_device.h>
  13. #include <linux/pm_runtime.h>
  14. #include <linux/reset.h>
  15. #include <crypto/aes.h>
  16. #include <crypto/internal/des.h>
  17. #include <crypto/engine.h>
  18. #include <crypto/scatterwalk.h>
  19. #include <crypto/internal/aead.h>
  20. #include <crypto/internal/skcipher.h>
  21. #define DRIVER_NAME "stm32-cryp"
  22. /* Bit [0] encrypt / decrypt */
  23. #define FLG_ENCRYPT BIT(0)
  24. /* Bit [8..1] algo & operation mode */
  25. #define FLG_AES BIT(1)
  26. #define FLG_DES BIT(2)
  27. #define FLG_TDES BIT(3)
  28. #define FLG_ECB BIT(4)
  29. #define FLG_CBC BIT(5)
  30. #define FLG_CTR BIT(6)
  31. #define FLG_GCM BIT(7)
  32. #define FLG_CCM BIT(8)
  33. /* Mode mask = bits [15..0] */
  34. #define FLG_MODE_MASK GENMASK(15, 0)
  35. /* Bit [31..16] status */
  36. /* Registers */
  37. #define CRYP_CR 0x00000000
  38. #define CRYP_SR 0x00000004
  39. #define CRYP_DIN 0x00000008
  40. #define CRYP_DOUT 0x0000000C
  41. #define CRYP_DMACR 0x00000010
  42. #define CRYP_IMSCR 0x00000014
  43. #define CRYP_RISR 0x00000018
  44. #define CRYP_MISR 0x0000001C
  45. #define CRYP_K0LR 0x00000020
  46. #define CRYP_K0RR 0x00000024
  47. #define CRYP_K1LR 0x00000028
  48. #define CRYP_K1RR 0x0000002C
  49. #define CRYP_K2LR 0x00000030
  50. #define CRYP_K2RR 0x00000034
  51. #define CRYP_K3LR 0x00000038
  52. #define CRYP_K3RR 0x0000003C
  53. #define CRYP_IV0LR 0x00000040
  54. #define CRYP_IV0RR 0x00000044
  55. #define CRYP_IV1LR 0x00000048
  56. #define CRYP_IV1RR 0x0000004C
  57. #define CRYP_CSGCMCCM0R 0x00000050
  58. #define CRYP_CSGCM0R 0x00000070
  59. /* Registers values */
  60. #define CR_DEC_NOT_ENC 0x00000004
  61. #define CR_TDES_ECB 0x00000000
  62. #define CR_TDES_CBC 0x00000008
  63. #define CR_DES_ECB 0x00000010
  64. #define CR_DES_CBC 0x00000018
  65. #define CR_AES_ECB 0x00000020
  66. #define CR_AES_CBC 0x00000028
  67. #define CR_AES_CTR 0x00000030
  68. #define CR_AES_KP 0x00000038
  69. #define CR_AES_GCM 0x00080000
  70. #define CR_AES_CCM 0x00080008
  71. #define CR_AES_UNKNOWN 0xFFFFFFFF
  72. #define CR_ALGO_MASK 0x00080038
  73. #define CR_DATA32 0x00000000
  74. #define CR_DATA16 0x00000040
  75. #define CR_DATA8 0x00000080
  76. #define CR_DATA1 0x000000C0
  77. #define CR_KEY128 0x00000000
  78. #define CR_KEY192 0x00000100
  79. #define CR_KEY256 0x00000200
  80. #define CR_FFLUSH 0x00004000
  81. #define CR_CRYPEN 0x00008000
  82. #define CR_PH_INIT 0x00000000
  83. #define CR_PH_HEADER 0x00010000
  84. #define CR_PH_PAYLOAD 0x00020000
  85. #define CR_PH_FINAL 0x00030000
  86. #define CR_PH_MASK 0x00030000
  87. #define CR_NBPBL_SHIFT 20
  88. #define SR_BUSY 0x00000010
  89. #define SR_OFNE 0x00000004
  90. #define IMSCR_IN BIT(0)
  91. #define IMSCR_OUT BIT(1)
  92. #define MISR_IN BIT(0)
  93. #define MISR_OUT BIT(1)
  94. /* Misc */
  95. #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
  96. #define GCM_CTR_INIT 2
  97. #define CRYP_AUTOSUSPEND_DELAY 50
  98. struct stm32_cryp_caps {
  99. bool swap_final;
  100. bool padding_wa;
  101. };
  102. struct stm32_cryp_ctx {
  103. struct crypto_engine_ctx enginectx;
  104. struct stm32_cryp *cryp;
  105. int keylen;
  106. __be32 key[AES_KEYSIZE_256 / sizeof(u32)];
  107. unsigned long flags;
  108. };
  109. struct stm32_cryp_reqctx {
  110. unsigned long mode;
  111. };
  112. struct stm32_cryp {
  113. struct list_head list;
  114. struct device *dev;
  115. void __iomem *regs;
  116. struct clk *clk;
  117. unsigned long flags;
  118. u32 irq_status;
  119. const struct stm32_cryp_caps *caps;
  120. struct stm32_cryp_ctx *ctx;
  121. struct crypto_engine *engine;
  122. struct skcipher_request *req;
  123. struct aead_request *areq;
  124. size_t authsize;
  125. size_t hw_blocksize;
  126. size_t payload_in;
  127. size_t header_in;
  128. size_t payload_out;
  129. struct scatterlist *out_sg;
  130. struct scatter_walk in_walk;
  131. struct scatter_walk out_walk;
  132. __be32 last_ctr[4];
  133. u32 gcm_ctr;
  134. };
  135. struct stm32_cryp_list {
  136. struct list_head dev_list;
  137. spinlock_t lock; /* protect dev_list */
  138. };
  139. static struct stm32_cryp_list cryp_list = {
  140. .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
  141. .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
  142. };
  143. static inline bool is_aes(struct stm32_cryp *cryp)
  144. {
  145. return cryp->flags & FLG_AES;
  146. }
  147. static inline bool is_des(struct stm32_cryp *cryp)
  148. {
  149. return cryp->flags & FLG_DES;
  150. }
  151. static inline bool is_tdes(struct stm32_cryp *cryp)
  152. {
  153. return cryp->flags & FLG_TDES;
  154. }
  155. static inline bool is_ecb(struct stm32_cryp *cryp)
  156. {
  157. return cryp->flags & FLG_ECB;
  158. }
  159. static inline bool is_cbc(struct stm32_cryp *cryp)
  160. {
  161. return cryp->flags & FLG_CBC;
  162. }
  163. static inline bool is_ctr(struct stm32_cryp *cryp)
  164. {
  165. return cryp->flags & FLG_CTR;
  166. }
  167. static inline bool is_gcm(struct stm32_cryp *cryp)
  168. {
  169. return cryp->flags & FLG_GCM;
  170. }
  171. static inline bool is_ccm(struct stm32_cryp *cryp)
  172. {
  173. return cryp->flags & FLG_CCM;
  174. }
  175. static inline bool is_encrypt(struct stm32_cryp *cryp)
  176. {
  177. return cryp->flags & FLG_ENCRYPT;
  178. }
  179. static inline bool is_decrypt(struct stm32_cryp *cryp)
  180. {
  181. return !is_encrypt(cryp);
  182. }
  183. static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
  184. {
  185. return readl_relaxed(cryp->regs + ofst);
  186. }
  187. static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
  188. {
  189. writel_relaxed(val, cryp->regs + ofst);
  190. }
  191. static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
  192. {
  193. u32 status;
  194. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  195. !(status & SR_BUSY), 10, 100000);
  196. }
  197. static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
  198. {
  199. writel_relaxed(readl_relaxed(cryp->regs + CRYP_CR) | CR_CRYPEN, cryp->regs + CRYP_CR);
  200. }
  201. static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
  202. {
  203. u32 status;
  204. return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
  205. !(status & CR_CRYPEN), 10, 100000);
  206. }
  207. static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
  208. {
  209. u32 status;
  210. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  211. status & SR_OFNE, 10, 100000);
  212. }
  213. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
  214. static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
  215. static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
  216. {
  217. struct stm32_cryp *tmp, *cryp = NULL;
  218. spin_lock_bh(&cryp_list.lock);
  219. if (!ctx->cryp) {
  220. list_for_each_entry(tmp, &cryp_list.dev_list, list) {
  221. cryp = tmp;
  222. break;
  223. }
  224. ctx->cryp = cryp;
  225. } else {
  226. cryp = ctx->cryp;
  227. }
  228. spin_unlock_bh(&cryp_list.lock);
  229. return cryp;
  230. }
  231. static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
  232. {
  233. if (!iv)
  234. return;
  235. stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
  236. stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));
  237. if (is_aes(cryp)) {
  238. stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
  239. stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
  240. }
  241. }
  242. static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
  243. {
  244. struct skcipher_request *req = cryp->req;
  245. __be32 *tmp = (void *)req->iv;
  246. if (!tmp)
  247. return;
  248. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
  249. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
  250. if (is_aes(cryp)) {
  251. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
  252. *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
  253. }
  254. }
  255. static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
  256. {
  257. unsigned int i;
  258. int r_id;
  259. if (is_des(c)) {
  260. stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
  261. stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
  262. } else {
  263. r_id = CRYP_K3RR;
  264. for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
  265. stm32_cryp_write(c, r_id,
  266. be32_to_cpu(c->ctx->key[i - 1]));
  267. }
  268. }
  269. static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
  270. {
  271. if (is_aes(cryp) && is_ecb(cryp))
  272. return CR_AES_ECB;
  273. if (is_aes(cryp) && is_cbc(cryp))
  274. return CR_AES_CBC;
  275. if (is_aes(cryp) && is_ctr(cryp))
  276. return CR_AES_CTR;
  277. if (is_aes(cryp) && is_gcm(cryp))
  278. return CR_AES_GCM;
  279. if (is_aes(cryp) && is_ccm(cryp))
  280. return CR_AES_CCM;
  281. if (is_des(cryp) && is_ecb(cryp))
  282. return CR_DES_ECB;
  283. if (is_des(cryp) && is_cbc(cryp))
  284. return CR_DES_CBC;
  285. if (is_tdes(cryp) && is_ecb(cryp))
  286. return CR_TDES_ECB;
  287. if (is_tdes(cryp) && is_cbc(cryp))
  288. return CR_TDES_CBC;
  289. dev_err(cryp->dev, "Unknown mode\n");
  290. return CR_AES_UNKNOWN;
  291. }
  292. static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
  293. {
  294. return is_encrypt(cryp) ? cryp->areq->cryptlen :
  295. cryp->areq->cryptlen - cryp->authsize;
  296. }
  297. static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
  298. {
  299. int ret;
  300. __be32 iv[4];
  301. /* Phase 1 : init */
  302. memcpy(iv, cryp->areq->iv, 12);
  303. iv[3] = cpu_to_be32(GCM_CTR_INIT);
  304. cryp->gcm_ctr = GCM_CTR_INIT;
  305. stm32_cryp_hw_write_iv(cryp, iv);
  306. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  307. /* Wait for end of processing */
  308. ret = stm32_cryp_wait_enable(cryp);
  309. if (ret) {
  310. dev_err(cryp->dev, "Timeout (gcm init)\n");
  311. return ret;
  312. }
  313. /* Prepare next phase */
  314. if (cryp->areq->assoclen) {
  315. cfg |= CR_PH_HEADER;
  316. stm32_cryp_write(cryp, CRYP_CR, cfg);
  317. } else if (stm32_cryp_get_input_text_len(cryp)) {
  318. cfg |= CR_PH_PAYLOAD;
  319. stm32_cryp_write(cryp, CRYP_CR, cfg);
  320. }
  321. return 0;
  322. }
  323. static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
  324. {
  325. u32 cfg;
  326. int err;
  327. /* Check if whole header written */
  328. if (!cryp->header_in) {
  329. /* Wait for completion */
  330. err = stm32_cryp_wait_busy(cryp);
  331. if (err) {
  332. dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
  333. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  334. stm32_cryp_finish_req(cryp, err);
  335. return;
  336. }
  337. if (stm32_cryp_get_input_text_len(cryp)) {
  338. /* Phase 3 : payload */
  339. cfg = stm32_cryp_read(cryp, CRYP_CR);
  340. cfg &= ~CR_CRYPEN;
  341. stm32_cryp_write(cryp, CRYP_CR, cfg);
  342. cfg &= ~CR_PH_MASK;
  343. cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
  344. stm32_cryp_write(cryp, CRYP_CR, cfg);
  345. } else {
  346. /*
  347. * Phase 4 : tag.
  348. * Nothing to read, nothing to write: the caller has to
  349. * end the request.
  350. */
  351. }
  352. }
  353. }
  354. static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
  355. {
  356. unsigned int i;
  357. size_t written;
  358. size_t len;
  359. u32 alen = cryp->areq->assoclen;
  360. u32 block[AES_BLOCK_32] = {0};
  361. u8 *b8 = (u8 *)block;
  362. if (alen <= 65280) {
  363. /* Write first u32 of B1 */
  364. b8[0] = (alen >> 8) & 0xFF;
  365. b8[1] = alen & 0xFF;
  366. len = 2;
  367. } else {
  368. /* Build the first two u32 of B1 */
  369. b8[0] = 0xFF;
  370. b8[1] = 0xFE;
  371. b8[2] = (alen & 0xFF000000) >> 24;
  372. b8[3] = (alen & 0x00FF0000) >> 16;
  373. b8[4] = (alen & 0x0000FF00) >> 8;
  374. b8[5] = alen & 0x000000FF;
  375. len = 6;
  376. }
  377. written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
  378. scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
  379. for (i = 0; i < AES_BLOCK_32; i++)
  380. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  381. cryp->header_in -= written;
  382. stm32_crypt_gcmccm_end_header(cryp);
  383. }
  384. static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
  385. {
  386. int ret;
  387. u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
  388. u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
  389. __be32 *bd;
  390. u32 *d;
  391. unsigned int i, textlen;
  392. /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
  393. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  394. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  395. iv[AES_BLOCK_SIZE - 1] = 1;
  396. stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
  397. /* Build B0 */
  398. memcpy(b0, iv, AES_BLOCK_SIZE);
  399. b0[0] |= (8 * ((cryp->authsize - 2) / 2));
  400. if (cryp->areq->assoclen)
  401. b0[0] |= 0x40;
  402. textlen = stm32_cryp_get_input_text_len(cryp);
  403. b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
  404. b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
  405. /* Enable HW */
  406. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  407. /* Write B0 */
  408. d = (u32 *)b0;
  409. bd = (__be32 *)b0;
  410. for (i = 0; i < AES_BLOCK_32; i++) {
  411. u32 xd = d[i];
  412. if (!cryp->caps->padding_wa)
  413. xd = be32_to_cpu(bd[i]);
  414. stm32_cryp_write(cryp, CRYP_DIN, xd);
  415. }
  416. /* Wait for end of processing */
  417. ret = stm32_cryp_wait_enable(cryp);
  418. if (ret) {
  419. dev_err(cryp->dev, "Timeout (ccm init)\n");
  420. return ret;
  421. }
  422. /* Prepare next phase */
  423. if (cryp->areq->assoclen) {
  424. cfg |= CR_PH_HEADER | CR_CRYPEN;
  425. stm32_cryp_write(cryp, CRYP_CR, cfg);
  426. /* Write first (special) block (may move to next phase [payload]) */
  427. stm32_cryp_write_ccm_first_header(cryp);
  428. } else if (stm32_cryp_get_input_text_len(cryp)) {
  429. cfg |= CR_PH_PAYLOAD;
  430. stm32_cryp_write(cryp, CRYP_CR, cfg);
  431. }
  432. return 0;
  433. }
  434. static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
  435. {
  436. int ret;
  437. u32 cfg, hw_mode;
  438. pm_runtime_get_sync(cryp->dev);
  439. /* Disable interrupt */
  440. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  441. /* Set configuration */
  442. cfg = CR_DATA8 | CR_FFLUSH;
  443. switch (cryp->ctx->keylen) {
  444. case AES_KEYSIZE_128:
  445. cfg |= CR_KEY128;
  446. break;
  447. case AES_KEYSIZE_192:
  448. cfg |= CR_KEY192;
  449. break;
  450. default:
  451. case AES_KEYSIZE_256:
  452. cfg |= CR_KEY256;
  453. break;
  454. }
  455. hw_mode = stm32_cryp_get_hw_mode(cryp);
  456. if (hw_mode == CR_AES_UNKNOWN)
  457. return -EINVAL;
  458. /* AES ECB/CBC decrypt: run key preparation first */
  459. if (is_decrypt(cryp) &&
  460. ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
  461. /* Configure in key preparation mode */
  462. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP);
  463. /* Set key only after full configuration done */
  464. stm32_cryp_hw_write_key(cryp);
  465. /* Start prepare key */
  466. stm32_cryp_enable(cryp);
  467. /* Wait for end of processing */
  468. ret = stm32_cryp_wait_busy(cryp);
  469. if (ret) {
  470. dev_err(cryp->dev, "Timeout (key preparation)\n");
  471. return ret;
  472. }
  473. cfg |= hw_mode | CR_DEC_NOT_ENC;
  474. /* Apply updated config (Decrypt + algo) and flush */
  475. stm32_cryp_write(cryp, CRYP_CR, cfg);
  476. } else {
  477. cfg |= hw_mode;
  478. if (is_decrypt(cryp))
  479. cfg |= CR_DEC_NOT_ENC;
  480. /* Apply config and flush */
  481. stm32_cryp_write(cryp, CRYP_CR, cfg);
  482. /* Set key only after configuration done */
  483. stm32_cryp_hw_write_key(cryp);
  484. }
  485. switch (hw_mode) {
  486. case CR_AES_GCM:
  487. case CR_AES_CCM:
  488. /* Phase 1 : init */
  489. if (hw_mode == CR_AES_CCM)
  490. ret = stm32_cryp_ccm_init(cryp, cfg);
  491. else
  492. ret = stm32_cryp_gcm_init(cryp, cfg);
  493. if (ret)
  494. return ret;
  495. break;
  496. case CR_DES_CBC:
  497. case CR_TDES_CBC:
  498. case CR_AES_CBC:
  499. case CR_AES_CTR:
  500. stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
  501. break;
  502. default:
  503. break;
  504. }
  505. /* Enable now */
  506. stm32_cryp_enable(cryp);
  507. return 0;
  508. }
  509. static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
  510. {
  511. if (!err && (is_gcm(cryp) || is_ccm(cryp)))
  512. /* Phase 4 : output tag */
  513. err = stm32_cryp_read_auth_tag(cryp);
  514. if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
  515. stm32_cryp_get_iv(cryp);
  516. pm_runtime_mark_last_busy(cryp->dev);
  517. pm_runtime_put_autosuspend(cryp->dev);
  518. if (is_gcm(cryp) || is_ccm(cryp))
  519. crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
  520. else
  521. crypto_finalize_skcipher_request(cryp->engine, cryp->req,
  522. err);
  523. }
  524. static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
  525. {
  526. /* Enable interrupt and let the IRQ handler do everything */
  527. stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);
  528. return 0;
  529. }
  530. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
  531. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  532. void *areq);
  533. static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
  534. {
  535. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  536. crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
  537. ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
  538. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
  539. ctx->enginectx.op.unprepare_request = NULL;
  540. return 0;
  541. }
  542. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
  543. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
  544. void *areq);
  545. static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
  546. {
  547. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  548. tfm->reqsize = sizeof(struct stm32_cryp_reqctx);
  549. ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
  550. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
  551. ctx->enginectx.op.unprepare_request = NULL;
  552. return 0;
  553. }
  554. static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
  555. {
  556. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
  557. crypto_skcipher_reqtfm(req));
  558. struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
  559. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  560. if (!cryp)
  561. return -ENODEV;
  562. rctx->mode = mode;
  563. return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
  564. }
  565. static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
  566. {
  567. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  568. struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
  569. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  570. if (!cryp)
  571. return -ENODEV;
  572. rctx->mode = mode;
  573. return crypto_transfer_aead_request_to_engine(cryp->engine, req);
  574. }
  575. static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
  576. unsigned int keylen)
  577. {
  578. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  579. memcpy(ctx->key, key, keylen);
  580. ctx->keylen = keylen;
  581. return 0;
  582. }
  583. static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
  584. unsigned int keylen)
  585. {
  586. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  587. keylen != AES_KEYSIZE_256)
  588. return -EINVAL;
  589. else
  590. return stm32_cryp_setkey(tfm, key, keylen);
  591. }
  592. static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
  593. unsigned int keylen)
  594. {
  595. return verify_skcipher_des_key(tfm, key) ?:
  596. stm32_cryp_setkey(tfm, key, keylen);
  597. }
  598. static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
  599. unsigned int keylen)
  600. {
  601. return verify_skcipher_des3_key(tfm, key) ?:
  602. stm32_cryp_setkey(tfm, key, keylen);
  603. }
  604. static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
  605. unsigned int keylen)
  606. {
  607. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  608. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  609. keylen != AES_KEYSIZE_256)
  610. return -EINVAL;
  611. memcpy(ctx->key, key, keylen);
  612. ctx->keylen = keylen;
  613. return 0;
  614. }
  615. static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
  616. unsigned int authsize)
  617. {
  618. switch (authsize) {
  619. case 4:
  620. case 8:
  621. case 12:
  622. case 13:
  623. case 14:
  624. case 15:
  625. case 16:
  626. break;
  627. default:
  628. return -EINVAL;
  629. }
  630. return 0;
  631. }
  632. static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
  633. unsigned int authsize)
  634. {
  635. switch (authsize) {
  636. case 4:
  637. case 6:
  638. case 8:
  639. case 10:
  640. case 12:
  641. case 14:
  642. case 16:
  643. break;
  644. default:
  645. return -EINVAL;
  646. }
  647. return 0;
  648. }
  649. static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
  650. {
  651. if (req->cryptlen % AES_BLOCK_SIZE)
  652. return -EINVAL;
  653. if (req->cryptlen == 0)
  654. return 0;
  655. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
  656. }
  657. static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
  658. {
  659. if (req->cryptlen % AES_BLOCK_SIZE)
  660. return -EINVAL;
  661. if (req->cryptlen == 0)
  662. return 0;
  663. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
  664. }
  665. static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
  666. {
  667. if (req->cryptlen % AES_BLOCK_SIZE)
  668. return -EINVAL;
  669. if (req->cryptlen == 0)
  670. return 0;
  671. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
  672. }
  673. static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
  674. {
  675. if (req->cryptlen % AES_BLOCK_SIZE)
  676. return -EINVAL;
  677. if (req->cryptlen == 0)
  678. return 0;
  679. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
  680. }
  681. static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
  682. {
  683. if (req->cryptlen == 0)
  684. return 0;
  685. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
  686. }
  687. static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
  688. {
  689. if (req->cryptlen == 0)
  690. return 0;
  691. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
  692. }
  693. static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
  694. {
  695. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
  696. }
  697. static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
  698. {
  699. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
  700. }
  701. static inline int crypto_ccm_check_iv(const u8 *iv)
  702. {
  703. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  704. if (iv[0] < 1 || iv[0] > 7)
  705. return -EINVAL;
  706. return 0;
  707. }
  708. static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
  709. {
  710. int err;
  711. err = crypto_ccm_check_iv(req->iv);
  712. if (err)
  713. return err;
  714. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
  715. }
  716. static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
  717. {
  718. int err;
  719. err = crypto_ccm_check_iv(req->iv);
  720. if (err)
  721. return err;
  722. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
  723. }
  724. static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
  725. {
  726. if (req->cryptlen % DES_BLOCK_SIZE)
  727. return -EINVAL;
  728. if (req->cryptlen == 0)
  729. return 0;
  730. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
  731. }
  732. static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
  733. {
  734. if (req->cryptlen % DES_BLOCK_SIZE)
  735. return -EINVAL;
  736. if (req->cryptlen == 0)
  737. return 0;
  738. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
  739. }
  740. static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
  741. {
  742. if (req->cryptlen % DES_BLOCK_SIZE)
  743. return -EINVAL;
  744. if (req->cryptlen == 0)
  745. return 0;
  746. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
  747. }
  748. static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
  749. {
  750. if (req->cryptlen % DES_BLOCK_SIZE)
  751. return -EINVAL;
  752. if (req->cryptlen == 0)
  753. return 0;
  754. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
  755. }
  756. static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
  757. {
  758. if (req->cryptlen % DES_BLOCK_SIZE)
  759. return -EINVAL;
  760. if (req->cryptlen == 0)
  761. return 0;
  762. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
  763. }
  764. static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
  765. {
  766. if (req->cryptlen % DES_BLOCK_SIZE)
  767. return -EINVAL;
  768. if (req->cryptlen == 0)
  769. return 0;
  770. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
  771. }
  772. static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
  773. {
  774. if (req->cryptlen % DES_BLOCK_SIZE)
  775. return -EINVAL;
  776. if (req->cryptlen == 0)
  777. return 0;
  778. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
  779. }
  780. static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
  781. {
  782. if (req->cryptlen % DES_BLOCK_SIZE)
  783. return -EINVAL;
  784. if (req->cryptlen == 0)
  785. return 0;
  786. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
  787. }
  788. static int stm32_cryp_prepare_req(struct skcipher_request *req,
  789. struct aead_request *areq)
  790. {
  791. struct stm32_cryp_ctx *ctx;
  792. struct stm32_cryp *cryp;
  793. struct stm32_cryp_reqctx *rctx;
  794. struct scatterlist *in_sg;
  795. int ret;
  796. if (!req && !areq)
  797. return -EINVAL;
  798. ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
  799. crypto_aead_ctx(crypto_aead_reqtfm(areq));
  800. cryp = ctx->cryp;
  801. if (!cryp)
  802. return -ENODEV;
  803. rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
  804. rctx->mode &= FLG_MODE_MASK;
  805. ctx->cryp = cryp;
  806. cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
  807. cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
  808. cryp->ctx = ctx;
  809. if (req) {
  810. cryp->req = req;
  811. cryp->areq = NULL;
  812. cryp->header_in = 0;
  813. cryp->payload_in = req->cryptlen;
  814. cryp->payload_out = req->cryptlen;
  815. cryp->authsize = 0;
  816. } else {
  817. /*
  818. * Length of input and output data:
  819. * Encryption case:
  820. * INPUT = AssocData || PlainText
  821. * <- assoclen -> <- cryptlen ->
  822. *
  823. * OUTPUT = AssocData || CipherText || AuthTag
  824. * <- assoclen -> <-- cryptlen --> <- authsize ->
  825. *
  826. * Decryption case:
  827. * INPUT = AssocData || CipherText || AuthTag
  828. * <- assoclen ---> <---------- cryptlen ---------->
  829. *
  830. * OUTPUT = AssocData || PlainText
  831. * <- assoclen -> <- cryptlen - authsize ->
  832. */
  833. cryp->areq = areq;
  834. cryp->req = NULL;
  835. cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
  836. if (is_encrypt(cryp)) {
  837. cryp->payload_in = areq->cryptlen;
  838. cryp->header_in = areq->assoclen;
  839. cryp->payload_out = areq->cryptlen;
  840. } else {
  841. cryp->payload_in = areq->cryptlen - cryp->authsize;
  842. cryp->header_in = areq->assoclen;
  843. cryp->payload_out = cryp->payload_in;
  844. }
  845. }
  846. in_sg = req ? req->src : areq->src;
  847. scatterwalk_start(&cryp->in_walk, in_sg);
  848. cryp->out_sg = req ? req->dst : areq->dst;
  849. scatterwalk_start(&cryp->out_walk, cryp->out_sg);
  850. if (is_gcm(cryp) || is_ccm(cryp)) {
  851. /* In output, jump after assoc data */
  852. scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
  853. }
  854. if (is_ctr(cryp))
  855. memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
  856. ret = stm32_cryp_hw_init(cryp);
  857. return ret;
  858. }
  859. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  860. void *areq)
  861. {
  862. struct skcipher_request *req = container_of(areq,
  863. struct skcipher_request,
  864. base);
  865. return stm32_cryp_prepare_req(req, NULL);
  866. }
  867. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
  868. {
  869. struct skcipher_request *req = container_of(areq,
  870. struct skcipher_request,
  871. base);
  872. struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
  873. crypto_skcipher_reqtfm(req));
  874. struct stm32_cryp *cryp = ctx->cryp;
  875. if (!cryp)
  876. return -ENODEV;
  877. return stm32_cryp_cpu_start(cryp);
  878. }
  879. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
  880. {
  881. struct aead_request *req = container_of(areq, struct aead_request,
  882. base);
  883. return stm32_cryp_prepare_req(NULL, req);
  884. }
  885. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
  886. {
  887. struct aead_request *req = container_of(areq, struct aead_request,
  888. base);
  889. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  890. struct stm32_cryp *cryp = ctx->cryp;
  891. if (!cryp)
  892. return -ENODEV;
  893. if (unlikely(!cryp->payload_in && !cryp->header_in)) {
  894. /* No input data to process: get tag and finish */
  895. stm32_cryp_finish_req(cryp, 0);
  896. return 0;
  897. }
  898. return stm32_cryp_cpu_start(cryp);
  899. }
  900. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
  901. {
  902. u32 cfg, size_bit;
  903. unsigned int i;
  904. int ret = 0;
  905. /* Update Config */
  906. cfg = stm32_cryp_read(cryp, CRYP_CR);
  907. cfg &= ~CR_PH_MASK;
  908. cfg |= CR_PH_FINAL;
  909. cfg &= ~CR_DEC_NOT_ENC;
  910. cfg |= CR_CRYPEN;
  911. stm32_cryp_write(cryp, CRYP_CR, cfg);
  912. if (is_gcm(cryp)) {
  913. /* GCM: write aad and payload size (in bits) */
  914. size_bit = cryp->areq->assoclen * 8;
  915. if (cryp->caps->swap_final)
  916. size_bit = (__force u32)cpu_to_be32(size_bit);
  917. stm32_cryp_write(cryp, CRYP_DIN, 0);
  918. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  919. size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
  920. cryp->areq->cryptlen - cryp->authsize;
  921. size_bit *= 8;
  922. if (cryp->caps->swap_final)
  923. size_bit = (__force u32)cpu_to_be32(size_bit);
  924. stm32_cryp_write(cryp, CRYP_DIN, 0);
  925. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  926. } else {
  927. /* CCM: write CTR0 */
  928. u32 iv32[AES_BLOCK_32];
  929. u8 *iv = (u8 *)iv32;
  930. __be32 *biv = (__be32 *)iv32;
  931. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  932. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  933. for (i = 0; i < AES_BLOCK_32; i++) {
  934. u32 xiv = iv32[i];
  935. if (!cryp->caps->padding_wa)
  936. xiv = be32_to_cpu(biv[i]);
  937. stm32_cryp_write(cryp, CRYP_DIN, xiv);
  938. }
  939. }
  940. /* Wait for output data */
  941. ret = stm32_cryp_wait_output(cryp);
  942. if (ret) {
  943. dev_err(cryp->dev, "Timeout (read tag)\n");
  944. return ret;
  945. }
  946. if (is_encrypt(cryp)) {
  947. u32 out_tag[AES_BLOCK_32];
  948. /* Get and write tag */
  949. for (i = 0; i < AES_BLOCK_32; i++)
  950. out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  951. scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
  952. } else {
  953. /* Get and check tag */
  954. u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
  955. scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
  956. for (i = 0; i < AES_BLOCK_32; i++)
  957. out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  958. if (crypto_memneq(in_tag, out_tag, cryp->authsize))
  959. ret = -EBADMSG;
  960. }
  961. /* Disable cryp */
  962. cfg &= ~CR_CRYPEN;
  963. stm32_cryp_write(cryp, CRYP_CR, cfg);
  964. return ret;
  965. }
  966. static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
  967. {
  968. u32 cr;
  969. if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
  970. /*
  971. * In this case, we need to manually increment the ctr counter,
  972. * as the HW doesn't handle the u32 carry.
  973. */
  974. crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
  975. cr = stm32_cryp_read(cryp, CRYP_CR);
  976. stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);
  977. stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
  978. stm32_cryp_write(cryp, CRYP_CR, cr);
  979. }
  980. /* The IV registers are BE */
  981. cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
  982. cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
  983. cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
  984. cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
  985. }
  986. static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
  987. {
  988. unsigned int i;
  989. u32 block[AES_BLOCK_32];
  990. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  991. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  992. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  993. cryp->payload_out), 1);
  994. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
  995. cryp->payload_out);
  996. }
  997. static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
  998. {
  999. unsigned int i;
  1000. u32 block[AES_BLOCK_32] = {0};
  1001. scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
  1002. cryp->payload_in), 0);
  1003. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  1004. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1005. cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
  1006. }
  1007. static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
  1008. {
  1009. int err;
  1010. u32 cfg, block[AES_BLOCK_32] = {0};
  1011. unsigned int i;
  1012. /* 'Special workaround' procedure described in the datasheet */
  1013. /* a) disable ip */
  1014. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1015. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1016. cfg &= ~CR_CRYPEN;
  1017. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1018. /* b) Update IV1R */
  1019. stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);
  1020. /* c) change mode to CTR */
  1021. cfg &= ~CR_ALGO_MASK;
  1022. cfg |= CR_AES_CTR;
  1023. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1024. /* a) enable IP */
  1025. cfg |= CR_CRYPEN;
  1026. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1027. /* b) pad and write the last block */
  1028. stm32_cryp_irq_write_block(cryp);
  1029. /* wait end of process */
  1030. err = stm32_cryp_wait_output(cryp);
  1031. if (err) {
  1032. dev_err(cryp->dev, "Timeout (write gcm last data)\n");
  1033. return stm32_cryp_finish_req(cryp, err);
  1034. }
  1035. /* c) get and store encrypted data */
  1036. /*
  1037. * Same code as stm32_cryp_irq_read_data(), but we want to store
  1038. * block value
  1039. */
  1040. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  1041. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  1042. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  1043. cryp->payload_out), 1);
  1044. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
  1045. cryp->payload_out);
  1046. /* d) change mode back to AES GCM */
  1047. cfg &= ~CR_ALGO_MASK;
  1048. cfg |= CR_AES_GCM;
  1049. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1050. /* e) change phase to Final */
  1051. cfg &= ~CR_PH_MASK;
  1052. cfg |= CR_PH_FINAL;
  1053. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1054. /* f) write padded data */
  1055. for (i = 0; i < AES_BLOCK_32; i++)
  1056. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1057. /* g) Empty fifo out */
  1058. err = stm32_cryp_wait_output(cryp);
  1059. if (err) {
  1060. dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
  1061. return stm32_cryp_finish_req(cryp, err);
  1062. }
  1063. for (i = 0; i < AES_BLOCK_32; i++)
  1064. stm32_cryp_read(cryp, CRYP_DOUT);
  1065. /* h) run the normal Final phase */
  1066. stm32_cryp_finish_req(cryp, 0);
  1067. }
  1068. static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
  1069. {
  1070. u32 cfg;
  1071. /* disable ip, set NPBLB and re-enable ip */
  1072. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1073. cfg &= ~CR_CRYPEN;
  1074. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1075. cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
  1076. cfg |= CR_CRYPEN;
  1077. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1078. }
  1079. static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
  1080. {
  1081. int err = 0;
  1082. u32 cfg, iv1tmp;
  1083. u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
  1084. u32 block[AES_BLOCK_32] = {0};
  1085. unsigned int i;
  1086. /* 'Special workaround' procedure described in the datasheet */
  1087. /* a) disable ip */
  1088. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1089. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1090. cfg &= ~CR_CRYPEN;
  1091. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1092. /* b) get IV1 from CRYP_CSGCMCCM7 */
  1093. iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
  1094. /* c) Load CRYP_CSGCMCCMxR */
  1095. for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
  1096. cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1097. /* d) Write IV1R */
  1098. stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);
  1099. /* e) change mode to CTR */
  1100. cfg &= ~CR_ALGO_MASK;
  1101. cfg |= CR_AES_CTR;
  1102. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1103. /* a) enable IP */
  1104. cfg |= CR_CRYPEN;
  1105. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1106. /* b) pad and write the last block */
  1107. stm32_cryp_irq_write_block(cryp);
  1108. /* wait end of process */
  1109. err = stm32_cryp_wait_output(cryp);
  1110. if (err) {
  1111. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1112. return stm32_cryp_finish_req(cryp, err);
  1113. }
  1114. /* c) get and store decrypted data */
  1115. /*
  1116. * Same code as stm32_cryp_irq_read_data(), but we want to store
  1117. * block value
  1118. */
  1119. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
  1120. block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  1121. scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
  1122. cryp->payload_out), 1);
  1123. cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
  1124. /* d) Load again CRYP_CSGCMCCMxR */
  1125. for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
  1126. cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1127. /* e) change mode back to AES CCM */
  1128. cfg &= ~CR_ALGO_MASK;
  1129. cfg |= CR_AES_CCM;
  1130. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1131. /* f) change phase to header */
  1132. cfg &= ~CR_PH_MASK;
  1133. cfg |= CR_PH_HEADER;
  1134. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1135. /* g) XOR and write padded data */
  1136. for (i = 0; i < ARRAY_SIZE(block); i++) {
  1137. block[i] ^= cstmp1[i];
  1138. block[i] ^= cstmp2[i];
  1139. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1140. }
  1141. /* h) wait for completion */
  1142. err = stm32_cryp_wait_busy(cryp);
  1143. if (err)
  1144. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1145. /* i) run the normal Final phase */
  1146. stm32_cryp_finish_req(cryp, err);
  1147. }
  1148. static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
  1149. {
  1150. if (unlikely(!cryp->payload_in)) {
  1151. dev_warn(cryp->dev, "No more data to process\n");
  1152. return;
  1153. }
  1154. if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
  1155. (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
  1156. is_encrypt(cryp))) {
  1157. /* Padding for AES GCM encryption */
  1158. if (cryp->caps->padding_wa) {
  1159. /* Special case 1 */
  1160. stm32_cryp_irq_write_gcm_padded_data(cryp);
  1161. return;
  1162. }
  1163. /* Setting padding bytes (NPBLB) */
  1164. stm32_cryp_irq_set_npblb(cryp);
  1165. }
  1166. if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
  1167. (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
  1168. is_decrypt(cryp))) {
  1169. /* Padding for AES CCM decryption */
  1170. if (cryp->caps->padding_wa) {
  1171. /* Special case 2 */
  1172. stm32_cryp_irq_write_ccm_padded_data(cryp);
  1173. return;
  1174. }
  1175. /* Setting padding bytes (NPBLB) */
  1176. stm32_cryp_irq_set_npblb(cryp);
  1177. }
  1178. if (is_aes(cryp) && is_ctr(cryp))
  1179. stm32_cryp_check_ctr_counter(cryp);
  1180. stm32_cryp_irq_write_block(cryp);
  1181. }
  1182. static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
  1183. {
  1184. unsigned int i;
  1185. u32 block[AES_BLOCK_32] = {0};
  1186. size_t written;
  1187. written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
  1188. scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
  1189. for (i = 0; i < AES_BLOCK_32; i++)
  1190. stm32_cryp_write(cryp, CRYP_DIN, block[i]);
  1191. cryp->header_in -= written;
  1192. stm32_crypt_gcmccm_end_header(cryp);
  1193. }
  1194. static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
  1195. {
  1196. struct stm32_cryp *cryp = arg;
  1197. u32 ph;
  1198. u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);
  1199. if (cryp->irq_status & MISR_OUT)
  1200. /* Output FIFO IRQ: read data */
  1201. stm32_cryp_irq_read_data(cryp);
  1202. if (cryp->irq_status & MISR_IN) {
  1203. if (is_gcm(cryp) || is_ccm(cryp)) {
  1204. ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
  1205. if (unlikely(ph == CR_PH_HEADER))
  1206. /* Write Header */
  1207. stm32_cryp_irq_write_gcmccm_header(cryp);
  1208. else
  1209. /* Input FIFO IRQ: write data */
  1210. stm32_cryp_irq_write_data(cryp);
  1211. if (is_gcm(cryp))
  1212. cryp->gcm_ctr++;
  1213. } else {
  1214. /* Input FIFO IRQ: write data */
  1215. stm32_cryp_irq_write_data(cryp);
  1216. }
  1217. }
  1218. /* Mask useless interrupts */
  1219. if (!cryp->payload_in && !cryp->header_in)
  1220. it_mask &= ~IMSCR_IN;
  1221. if (!cryp->payload_out)
  1222. it_mask &= ~IMSCR_OUT;
  1223. stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);
  1224. if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
  1225. stm32_cryp_finish_req(cryp, 0);
  1226. return IRQ_HANDLED;
  1227. }
  1228. static irqreturn_t stm32_cryp_irq(int irq, void *arg)
  1229. {
  1230. struct stm32_cryp *cryp = arg;
  1231. cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
  1232. return IRQ_WAKE_THREAD;
  1233. }
  1234. static struct skcipher_alg crypto_algs[] = {
  1235. {
  1236. .base.cra_name = "ecb(aes)",
  1237. .base.cra_driver_name = "stm32-ecb-aes",
  1238. .base.cra_priority = 200,
  1239. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1240. .base.cra_blocksize = AES_BLOCK_SIZE,
  1241. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1242. .base.cra_alignmask = 0,
  1243. .base.cra_module = THIS_MODULE,
  1244. .init = stm32_cryp_init_tfm,
  1245. .min_keysize = AES_MIN_KEY_SIZE,
  1246. .max_keysize = AES_MAX_KEY_SIZE,
  1247. .setkey = stm32_cryp_aes_setkey,
  1248. .encrypt = stm32_cryp_aes_ecb_encrypt,
  1249. .decrypt = stm32_cryp_aes_ecb_decrypt,
  1250. },
  1251. {
  1252. .base.cra_name = "cbc(aes)",
  1253. .base.cra_driver_name = "stm32-cbc-aes",
  1254. .base.cra_priority = 200,
  1255. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1256. .base.cra_blocksize = AES_BLOCK_SIZE,
  1257. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1258. .base.cra_alignmask = 0,
  1259. .base.cra_module = THIS_MODULE,
  1260. .init = stm32_cryp_init_tfm,
  1261. .min_keysize = AES_MIN_KEY_SIZE,
  1262. .max_keysize = AES_MAX_KEY_SIZE,
  1263. .ivsize = AES_BLOCK_SIZE,
  1264. .setkey = stm32_cryp_aes_setkey,
  1265. .encrypt = stm32_cryp_aes_cbc_encrypt,
  1266. .decrypt = stm32_cryp_aes_cbc_decrypt,
  1267. },
  1268. {
  1269. .base.cra_name = "ctr(aes)",
  1270. .base.cra_driver_name = "stm32-ctr-aes",
  1271. .base.cra_priority = 200,
  1272. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1273. .base.cra_blocksize = 1,
  1274. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1275. .base.cra_alignmask = 0,
  1276. .base.cra_module = THIS_MODULE,
  1277. .init = stm32_cryp_init_tfm,
  1278. .min_keysize = AES_MIN_KEY_SIZE,
  1279. .max_keysize = AES_MAX_KEY_SIZE,
  1280. .ivsize = AES_BLOCK_SIZE,
  1281. .setkey = stm32_cryp_aes_setkey,
  1282. .encrypt = stm32_cryp_aes_ctr_encrypt,
  1283. .decrypt = stm32_cryp_aes_ctr_decrypt,
  1284. },
  1285. {
  1286. .base.cra_name = "ecb(des)",
  1287. .base.cra_driver_name = "stm32-ecb-des",
  1288. .base.cra_priority = 200,
  1289. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1290. .base.cra_blocksize = DES_BLOCK_SIZE,
  1291. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1292. .base.cra_alignmask = 0,
  1293. .base.cra_module = THIS_MODULE,
  1294. .init = stm32_cryp_init_tfm,
  1295. .min_keysize = DES_BLOCK_SIZE,
  1296. .max_keysize = DES_BLOCK_SIZE,
  1297. .setkey = stm32_cryp_des_setkey,
  1298. .encrypt = stm32_cryp_des_ecb_encrypt,
  1299. .decrypt = stm32_cryp_des_ecb_decrypt,
  1300. },
  1301. {
  1302. .base.cra_name = "cbc(des)",
  1303. .base.cra_driver_name = "stm32-cbc-des",
  1304. .base.cra_priority = 200,
  1305. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1306. .base.cra_blocksize = DES_BLOCK_SIZE,
  1307. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1308. .base.cra_alignmask = 0,
  1309. .base.cra_module = THIS_MODULE,
  1310. .init = stm32_cryp_init_tfm,
  1311. .min_keysize = DES_BLOCK_SIZE,
  1312. .max_keysize = DES_BLOCK_SIZE,
  1313. .ivsize = DES_BLOCK_SIZE,
  1314. .setkey = stm32_cryp_des_setkey,
  1315. .encrypt = stm32_cryp_des_cbc_encrypt,
  1316. .decrypt = stm32_cryp_des_cbc_decrypt,
  1317. },
  1318. {
  1319. .base.cra_name = "ecb(des3_ede)",
  1320. .base.cra_driver_name = "stm32-ecb-des3",
  1321. .base.cra_priority = 200,
  1322. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1323. .base.cra_blocksize = DES_BLOCK_SIZE,
  1324. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1325. .base.cra_alignmask = 0,
  1326. .base.cra_module = THIS_MODULE,
  1327. .init = stm32_cryp_init_tfm,
  1328. .min_keysize = 3 * DES_BLOCK_SIZE,
  1329. .max_keysize = 3 * DES_BLOCK_SIZE,
  1330. .setkey = stm32_cryp_tdes_setkey,
  1331. .encrypt = stm32_cryp_tdes_ecb_encrypt,
  1332. .decrypt = stm32_cryp_tdes_ecb_decrypt,
  1333. },
  1334. {
  1335. .base.cra_name = "cbc(des3_ede)",
  1336. .base.cra_driver_name = "stm32-cbc-des3",
  1337. .base.cra_priority = 200,
  1338. .base.cra_flags = CRYPTO_ALG_ASYNC,
  1339. .base.cra_blocksize = DES_BLOCK_SIZE,
  1340. .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1341. .base.cra_alignmask = 0,
  1342. .base.cra_module = THIS_MODULE,
  1343. .init = stm32_cryp_init_tfm,
  1344. .min_keysize = 3 * DES_BLOCK_SIZE,
  1345. .max_keysize = 3 * DES_BLOCK_SIZE,
  1346. .ivsize = DES_BLOCK_SIZE,
  1347. .setkey = stm32_cryp_tdes_setkey,
  1348. .encrypt = stm32_cryp_tdes_cbc_encrypt,
  1349. .decrypt = stm32_cryp_tdes_cbc_decrypt,
  1350. },
  1351. };
  1352. static struct aead_alg aead_algs[] = {
  1353. {
  1354. .setkey = stm32_cryp_aes_aead_setkey,
  1355. .setauthsize = stm32_cryp_aes_gcm_setauthsize,
  1356. .encrypt = stm32_cryp_aes_gcm_encrypt,
  1357. .decrypt = stm32_cryp_aes_gcm_decrypt,
  1358. .init = stm32_cryp_aes_aead_init,
  1359. .ivsize = 12,
  1360. .maxauthsize = AES_BLOCK_SIZE,
  1361. .base = {
  1362. .cra_name = "gcm(aes)",
  1363. .cra_driver_name = "stm32-gcm-aes",
  1364. .cra_priority = 200,
  1365. .cra_flags = CRYPTO_ALG_ASYNC,
  1366. .cra_blocksize = 1,
  1367. .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1368. .cra_alignmask = 0,
  1369. .cra_module = THIS_MODULE,
  1370. },
  1371. },
  1372. {
  1373. .setkey = stm32_cryp_aes_aead_setkey,
  1374. .setauthsize = stm32_cryp_aes_ccm_setauthsize,
  1375. .encrypt = stm32_cryp_aes_ccm_encrypt,
  1376. .decrypt = stm32_cryp_aes_ccm_decrypt,
  1377. .init = stm32_cryp_aes_aead_init,
  1378. .ivsize = AES_BLOCK_SIZE,
  1379. .maxauthsize = AES_BLOCK_SIZE,
  1380. .base = {
  1381. .cra_name = "ccm(aes)",
  1382. .cra_driver_name = "stm32-ccm-aes",
  1383. .cra_priority = 200,
  1384. .cra_flags = CRYPTO_ALG_ASYNC,
  1385. .cra_blocksize = 1,
  1386. .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
  1387. .cra_alignmask = 0,
  1388. .cra_module = THIS_MODULE,
  1389. },
  1390. },
  1391. };
  1392. static const struct stm32_cryp_caps f7_data = {
  1393. .swap_final = true,
  1394. .padding_wa = true,
  1395. };
  1396. static const struct stm32_cryp_caps mp1_data = {
  1397. .swap_final = false,
  1398. .padding_wa = false,
  1399. };
  1400. static const struct of_device_id stm32_dt_ids[] = {
  1401. { .compatible = "st,stm32f756-cryp", .data = &f7_data},
  1402. { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
  1403. {},
  1404. };
  1405. MODULE_DEVICE_TABLE(of, stm32_dt_ids);
  1406. static int stm32_cryp_probe(struct platform_device *pdev)
  1407. {
  1408. struct device *dev = &pdev->dev;
  1409. struct stm32_cryp *cryp;
  1410. struct reset_control *rst;
  1411. int irq, ret;
  1412. cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
  1413. if (!cryp)
  1414. return -ENOMEM;
  1415. cryp->caps = of_device_get_match_data(dev);
  1416. if (!cryp->caps)
  1417. return -ENODEV;
  1418. cryp->dev = dev;
  1419. cryp->regs = devm_platform_ioremap_resource(pdev, 0);
  1420. if (IS_ERR(cryp->regs))
  1421. return PTR_ERR(cryp->regs);
  1422. irq = platform_get_irq(pdev, 0);
  1423. if (irq < 0)
  1424. return irq;
  1425. ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
  1426. stm32_cryp_irq_thread, IRQF_ONESHOT,
  1427. dev_name(dev), cryp);
  1428. if (ret) {
  1429. dev_err(dev, "Cannot grab IRQ\n");
  1430. return ret;
  1431. }
  1432. cryp->clk = devm_clk_get(dev, NULL);
  1433. if (IS_ERR(cryp->clk)) {
  1434. dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");
  1435. return PTR_ERR(cryp->clk);
  1436. }
  1437. ret = clk_prepare_enable(cryp->clk);
  1438. if (ret) {
  1439. dev_err(cryp->dev, "Failed to enable clock\n");
  1440. return ret;
  1441. }
  1442. pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
  1443. pm_runtime_use_autosuspend(dev);
  1444. pm_runtime_get_noresume(dev);
  1445. pm_runtime_set_active(dev);
  1446. pm_runtime_enable(dev);
  1447. rst = devm_reset_control_get(dev, NULL);
  1448. if (IS_ERR(rst)) {
  1449. ret = PTR_ERR(rst);
  1450. if (ret == -EPROBE_DEFER)
  1451. goto err_rst;
  1452. } else {
  1453. reset_control_assert(rst);
  1454. udelay(2);
  1455. reset_control_deassert(rst);
  1456. }
  1457. platform_set_drvdata(pdev, cryp);
  1458. spin_lock(&cryp_list.lock);
  1459. list_add(&cryp->list, &cryp_list.dev_list);
  1460. spin_unlock(&cryp_list.lock);
  1461. /* Initialize crypto engine */
  1462. cryp->engine = crypto_engine_alloc_init(dev, 1);
  1463. if (!cryp->engine) {
  1464. dev_err(dev, "Could not init crypto engine\n");
  1465. ret = -ENOMEM;
  1466. goto err_engine1;
  1467. }
  1468. ret = crypto_engine_start(cryp->engine);
  1469. if (ret) {
  1470. dev_err(dev, "Could not start crypto engine\n");
  1471. goto err_engine2;
  1472. }
  1473. ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1474. if (ret) {
  1475. dev_err(dev, "Could not register algs\n");
  1476. goto err_algs;
  1477. }
  1478. ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
  1479. if (ret)
  1480. goto err_aead_algs;
  1481. dev_info(dev, "Initialized\n");
  1482. pm_runtime_put_sync(dev);
  1483. return 0;
  1484. err_aead_algs:
  1485. crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1486. err_algs:
  1487. err_engine2:
  1488. crypto_engine_exit(cryp->engine);
  1489. err_engine1:
  1490. spin_lock(&cryp_list.lock);
  1491. list_del(&cryp->list);
  1492. spin_unlock(&cryp_list.lock);
  1493. err_rst:
  1494. pm_runtime_disable(dev);
  1495. pm_runtime_put_noidle(dev);
  1496. clk_disable_unprepare(cryp->clk);
  1497. return ret;
  1498. }
  1499. static int stm32_cryp_remove(struct platform_device *pdev)
  1500. {
  1501. struct stm32_cryp *cryp = platform_get_drvdata(pdev);
  1502. int ret;
  1503. if (!cryp)
  1504. return -ENODEV;
  1505. ret = pm_runtime_resume_and_get(cryp->dev);
  1506. if (ret < 0)
  1507. return ret;
  1508. crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
  1509. crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
  1510. crypto_engine_exit(cryp->engine);
  1511. spin_lock(&cryp_list.lock);
  1512. list_del(&cryp->list);
  1513. spin_unlock(&cryp_list.lock);
  1514. pm_runtime_disable(cryp->dev);
  1515. pm_runtime_put_noidle(cryp->dev);
  1516. clk_disable_unprepare(cryp->clk);
  1517. return 0;
  1518. }
  1519. #ifdef CONFIG_PM
  1520. static int stm32_cryp_runtime_suspend(struct device *dev)
  1521. {
  1522. struct stm32_cryp *cryp = dev_get_drvdata(dev);
  1523. clk_disable_unprepare(cryp->clk);
  1524. return 0;
  1525. }
  1526. static int stm32_cryp_runtime_resume(struct device *dev)
  1527. {
  1528. struct stm32_cryp *cryp = dev_get_drvdata(dev);
  1529. int ret;
  1530. ret = clk_prepare_enable(cryp->clk);
  1531. if (ret) {
  1532. dev_err(cryp->dev, "Failed to prepare_enable clock\n");
  1533. return ret;
  1534. }
  1535. return 0;
  1536. }
  1537. #endif
  1538. static const struct dev_pm_ops stm32_cryp_pm_ops = {
  1539. SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
  1540. pm_runtime_force_resume)
  1541. SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
  1542. stm32_cryp_runtime_resume, NULL)
  1543. };
  1544. static struct platform_driver stm32_cryp_driver = {
  1545. .probe = stm32_cryp_probe,
  1546. .remove = stm32_cryp_remove,
  1547. .driver = {
  1548. .name = DRIVER_NAME,
  1549. .pm = &stm32_cryp_pm_ops,
  1550. .of_match_table = stm32_dt_ids,
  1551. },
  1552. };
  1553. module_platform_driver(stm32_cryp_driver);
  1554. MODULE_AUTHOR("Fabien Dessenne <[email protected]>");
  1555. MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
  1556. MODULE_LICENSE("GPL");