caamalg.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |    (PDB)    |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
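/*
 * The job-descriptor layout documented above is exactly what
 * init_aead_job() and init_skcipher_job() below emit: a shared-descriptor
 * header via init_job_desc_shared(), then the SEQ OUT/IN pointers via
 * append_seq_out_ptr()/append_seq_in_ptr().
 */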
#include "compat.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
#include <crypto/engine.h>
#include <crypto/xts.h>
#include <asm/unaligned.h>

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)
#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
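/*
 * Note: CAAM descriptors are built from 4-byte command words (CAAM_CMD_SZ),
 * and the hardware descriptor buffer holds at most 64 such words
 * (CAAM_DESC_BYTES_MAX). DESC_MAX_USED_LEN is therefore the shared-descriptor
 * budget in command words, after reserving room for the job descriptor's
 * I/O commands.
 */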
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct crypto_engine_ctx enginectx;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	bool xts_key_fallback;
	struct crypto_skcipher *fallback;
};
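/*
 * The shared descriptors and key buffer live inside the context and stay
 * DMA-mapped for the tfm's lifetime; whenever a setkey/setauthsize handler
 * rebuilds a descriptor, only a dma_sync_single_for_device() call is needed,
 * as seen throughout the *_set_sh_desc() functions below.
 */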
struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
	struct skcipher_request fallback_req;
};

struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}
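	/*
	 * When the whole job fits in the 64-word buffer, the split key is
	 * inlined directly into the shared descriptor; otherwise only its
	 * DMA address is referenced, trading an extra memory fetch for
	 * descriptor space.
	 */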
	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);
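	/*
	 * desc_inline_query() sets bit n of inl_mask when data_len[n] still
	 * fits within the descriptor budget, so bit 0 covers the (split)
	 * auth key and bit 1 the cipher key.
	 */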
	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	if (authsize != 16)
		return -EINVAL;

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}
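/*
 * For the IPsec variant the supplied key material is {ChaCha20 key, salt}:
 * saltlen is whatever part of the 12-byte ChaCha20-Poly1305 nonce is not
 * carried per-request in the IV, and only the key proper is counted in
 * cdata.keylen. For the plain rfc7539 template saltlen is 0.
 */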
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append the encryption key after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
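/*
 * The authenc key blob parsed above by crypto_authenc_extractkeys() uses the
 * generic crypto API encoding: an rtattr header carrying enckeylen, followed
 * by the authentication key and then the encryption key. The resulting
 * ctx->key layout is [auth split key, padded][enc key], matching the
 * adata/cdata offsets set up in aead_set_sh_desc().
 */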
static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		return err;

	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
	      aead_setkey(aead, key, keylen);

	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}
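/*
 * Per RFC 4106, the user-supplied key is {AES key, 4-byte salt}; the salt
 * stays in ctx->key just past cdata.keylen and is later appended to the
 * descriptor as the fixed part of the nonce (see init_gcm_job()).
 * RFC 4543 (GMAC) below uses the same convention.
 */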
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}
static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}
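/*
 * verify_skcipher_des_key()/verify_skcipher_des3_key() reject weak or
 * degenerate (3)DES keys (honouring CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
 * before the key is programmed into the shared descriptors.
 */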
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int err;

	err = xts_verify_key(skcipher, key, keylen);
	if (err) {
		dev_dbg(jrdev, "key size mismatch\n");
		return err;
	}

	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
		ctx->xts_key_fallback = true;

	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
		if (err)
			return err;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
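/*
 * XTS fallback policy (as reflected above): the hardware only handles
 * 2 x 128-bit and 2 x 256-bit XTS keys, so any other valid length is
 * flagged for the software fallback. On CAAM Era <= 8 the fallback key is
 * prepared unconditionally as well, since those parts cannot cover every
 * request shape (e.g. IVs with a non-zero upper half); the encrypt/decrypt
 * paths decide per request whether to divert to ctx->fallback.
 */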
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
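/*
 * Both edescs are single kmalloc'd blobs laid out as
 * [struct][h/w job descriptor][sec4 S/G table][IV, skcipher only],
 * so one allocation covers everything the job needs; see the pointer
 * arithmetic in aead_edesc_alloc() below.
 */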
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);

	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
			    void *context)
{
	struct aead_request *req = context;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
}
static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
				void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
		       ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}
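/*
 * The output IV copied back above lives immediately after the S/G table in
 * the DMA-mapped edesc, which is where the skcipher edesc allocation path
 * places it, so the hardware-updated value is visible here once the job
 * completes.
 */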
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
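/*
 * HDR_SHARE_DEFER | HDR_REVERSE makes the job descriptor run before the
 * shared descriptor it points to, so the SEQ IN/OUT pointers above are
 * established before the shared descriptor's crypto operation consumes
 * them. The output length is the input length plus or minus authsize,
 * depending on direction.
 */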
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
		(int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}
  1164. /*
  1165. * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
  1166. * the end of the table by allocating more S/G entries.
  1167. */
  1168. sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
  1169. if (mapped_dst_nents > 1)
  1170. sec4_sg_len += pad_sg_nents(mapped_dst_nents);
  1171. else
  1172. sec4_sg_len = pad_sg_nents(sec4_sg_len);
  1173. sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
  1174. /* allocate space for base edesc and hw desc commands, link tables */
  1175. edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
  1176. GFP_DMA | flags);
  1177. if (!edesc) {
  1178. caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
  1179. 0, 0, 0);
  1180. return ERR_PTR(-ENOMEM);
  1181. }
  1182. edesc->src_nents = src_nents;
  1183. edesc->dst_nents = dst_nents;
  1184. edesc->mapped_src_nents = mapped_src_nents;
  1185. edesc->mapped_dst_nents = mapped_dst_nents;
  1186. edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
  1187. desc_bytes;
  1188. rctx->edesc = edesc;
  1189. *all_contig_ptr = !(mapped_src_nents > 1);
  1190. sec4_sg_index = 0;
  1191. if (mapped_src_nents > 1) {
  1192. sg_to_sec4_sg_last(req->src, src_len,
  1193. edesc->sec4_sg + sec4_sg_index, 0);
  1194. sec4_sg_index += mapped_src_nents;
  1195. }
  1196. if (mapped_dst_nents > 1) {
  1197. sg_to_sec4_sg_last(req->dst, dst_len,
  1198. edesc->sec4_sg + sec4_sg_index, 0);
  1199. }
  1200. if (!sec4_sg_bytes)
  1201. return edesc;
  1202. edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
  1203. sec4_sg_bytes, DMA_TO_DEVICE);
  1204. if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
  1205. dev_err(jrdev, "unable to map S/G table\n");
  1206. aead_unmap(jrdev, edesc, req);
  1207. kfree(edesc);
  1208. return ERR_PTR(-ENOMEM);
  1209. }
  1210. edesc->sec4_sg_bytes = sec4_sg_bytes;
  1211. return edesc;
  1212. }
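
/*
 * Worked example for the padding logic above (illustrative; assumes
 * pad_sg_nents() rounds the count up to a multiple of 4, in line with the
 * 4-entry burst reads mentioned in the comment): for mapped_src_nents = 3
 * and mapped_dst_nents = 2, only the trailing (dst) table is padded, giving
 * sec4_sg_len = 3 + pad_sg_nents(2) = 3 + 4 = 7 entries. The padding entries
 * are never consumed; they only keep the HW's reads inside the allocation.
 */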
static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
{
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct aead_edesc *edesc = rctx->edesc;
	u32 *desc = edesc->hw_desc;
	int ret;

	/*
	 * Only backlog requests are sent to the crypto-engine, since the
	 * others can be handled by CAAM, if free, especially since JR has
	 * up to 1024 entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
							     req);
	else
		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}

	return ret;
}
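
/*
 * Ownership contract of aead_enqueue_req() (note added for clarity):
 * -EINPROGRESS or -EBUSY means the request was accepted and the edesc will
 * be unmapped and freed from the completion callback; any other return code
 * means it was already torn down here and the caller must not touch it.
 */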
static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, encrypt);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	return aead_enqueue_req(jrdev, req);
}

static int chachapoly_encrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, true);
}

static int chachapoly_decrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, false);
}
static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}
static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}
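
/*
 * Note added for clarity: returning -ENOSPC while engine->retry_support is
 * set asks the crypto-engine to requeue the request later, so the edesc must
 * stay alive. Without retry support, any status other than -EINPROGRESS is
 * final and the edesc is torn down right here.
 */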
static inline int gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}

static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}
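
/*
 * Note added for clarity: the "? :" above is the GNU binary conditional; a
 * non-zero error code from crypto_ipsec_check_assoclen() is returned as-is,
 * and only a zero (valid assoclen) result falls through to the GCM path.
 */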
/*
 * allocate and map the skcipher extended descriptor
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G)
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}
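
/*
 * Worked example for the table layout above (illustrative): with ivsize = 16,
 * req->src != req->dst, mapped_src_nents = 2 and mapped_dst_nents = 2, the
 * input table is [IV, src0, src1], so dst_sg_idx = 3, and the output table
 * occupies entries 3..5 as [dst0, dst1, IV]. The trailing IV entry (index
 * dst_sg_idx + mapped_dst_nents = 5) is flagged as last, letting the HW write
 * the updated IV back into the DMAable buffer.
 */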
static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}
static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
}
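
/*
 * Reading note (added): this returns true when the upper half of the XTS IV,
 * i.e. bits 64..127 of the sector index, is non-zero. Hardware covered by the
 * ctrlpriv->era <= 8 check in skcipher_crypt() below only implements a 64-bit
 * XTS IV, so such requests are routed to the software fallback.
 */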
static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int ret = 0;

	/*
	 * XTS is expected to return an error even for input length = 0.
	 * Note that the case of input length < block size will be caught
	 * during HW offloading and return an error.
	 */
	if (!req->cryptlen && !ctx->fallback)
		return 0;

	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
			      ctx->xts_key_fallback)) {
		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);

		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);

		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				 crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_skcipher_job(req, edesc, encrypt);

	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	/*
	 * Only backlog requests are sent to the crypto-engine, since the
	 * others can be handled by CAAM, if free, especially since JR has
	 * up to 1024 entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
								 req);
	else
		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
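
/*
 * Algorithm template tables. Each entry pairs a generic crypto API template
 * (cra_name) with this driver's implementation (cra_driver_name) and the
 * CAAM class 1/2 operation codes used to build its shared descriptors.
 * Fields omitted from the .caam part (rfc3686, geniv, nodkp) default to
 * false in these static initializers.
 */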
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
};
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4543_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-"
						   "caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
  3042. },
  3043. .setkey = chachapoly_setkey,
  3044. .setauthsize = chachapoly_setauthsize,
  3045. .encrypt = chachapoly_encrypt,
  3046. .decrypt = chachapoly_decrypt,
  3047. .ivsize = 8,
  3048. .maxauthsize = POLY1305_DIGEST_SIZE,
  3049. },
  3050. .caam = {
  3051. .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
  3052. OP_ALG_AAI_AEAD,
  3053. .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
  3054. OP_ALG_AAI_AEAD,
  3055. .nodkp = true,
  3056. },
  3057. },
  3058. };
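
/*
 * caam_init_common - per-transform setup shared by the skcipher and AEAD
 * ->init hooks: grab a job ring for the transform and DMA-map the
 * contiguous sh_desc_enc..key region of the context in one mapping, then
 * derive the individual DMA addresses by offset. @uses_dkp is true when
 * the shared descriptors use the Derived Key Protocol; on Era 6+ parts
 * DKP has the device write the derived split key back into this buffer,
 * which is (as far as this editor can tell) why the mapping must be
 * bidirectional there.
 */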
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
					  sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
				  sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
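
/*
 * caam_cra_init - skcipher ->init hook. For XTS, a software fallback tfm
 * is allocated under the same algorithm name and room for its request is
 * reserved in the caam request context; the XTS setkey/encrypt paths
 * elsewhere in this file decide when to punt to it for cases the
 * accelerator cannot handle (e.g., non-standard key sizes).
 */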
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
	int ret = 0;

	ctx->enginectx.op.do_one_request = skcipher_do_one_req;

	if (alg_aai == OP_ALG_AAI_XTS) {
		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
		struct crypto_skcipher *fallback;

		fallback = crypto_alloc_skcipher(tfm_name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(fallback)) {
			pr_err("Failed to allocate %s fallback: %ld\n",
			       tfm_name, PTR_ERR(fallback));
			return PTR_ERR(fallback);
		}

		ctx->fallback = fallback;
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
					    crypto_skcipher_reqsize(fallback));
	} else {
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
	}

	ret = caam_init_common(ctx, &caam_alg->caam, false);
	if (ret && ctx->fallback)
		crypto_free_skcipher(ctx->fallback);

	return ret;
}
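
/* AEAD ->init hook: DKP is used unless the template opted out via nodkp. */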
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));

	ctx->enginectx.op.do_one_request = aead_do_one_req;

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}
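
/*
 * caam_exit_common - undo caam_init_common: unmap the shared-descriptor/
 * key region and release the job ring.
 */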
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
			       offsetof(struct caam_ctx, sh_desc_enc),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
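
/* skcipher ->exit hook: drop the XTS fallback first, if one was allocated. */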
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (ctx->fallback)
		crypto_free_skcipher(ctx->fallback);
	caam_exit_common(ctx);
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
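
/*
 * caam_algapi_exit - unregister everything caam_algapi_init managed to
 * register; the ->registered flag guards against templates that were
 * skipped for missing hardware support or whose registration failed.
 */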
void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}
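
/*
 * Fill in the crypto_alg boilerplate common to every skcipher template
 * before registration. cra_flags is OR-ed rather than assigned so that
 * per-template flags (e.g. CRYPTO_ALG_NEED_FALLBACK) are preserved.
 */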
static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_KERN_DRIVER_ONLY);

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}
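
/* AEAD counterpart of caam_skcipher_alg_init(). */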
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
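
/*
 * caam_algapi_init - detect which CHAs (crypto hardware accelerator
 * blocks) this CAAM instance actually has, then register only the
 * templates they can run. Era < 10 parts describe the blocks through the
 * compact cha_id_ls/cha_num_ls registers; Era 10+ parts expose per-CHA
 * version registers instead. The skip rules below mirror known hardware
 * limits: no XTS on low-power AES, no GCM on early low-power AES
 * revisions, digests capped at SHA-256 on LP256 MDHA, and
 * ChaCha20/Poly1305 only where CCHA/PTHA blocks are instantiated.
 */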
int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}