caamalg_desc.c 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644
  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * Shared descriptors for aead, skcipher algorithms
  4. *
  5. * Copyright 2016-2019 NXP
  6. */
  7. #include "compat.h"
  8. #include "desc_constr.h"
  9. #include "caamalg_desc.h"
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	/* Write the variable-length payload out to the sequence (req->dst) */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	/*
	 * Read the payload into both engine classes (FIFOLD_CLASS_BOTH);
	 * LASTBOTH marks it as the final input for both classes.
	 */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}
/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	/* If descriptor is shared, jump to the OP_ALG_AAI_DK variant below */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
	/* Non-shared case done - skip over the DK operation */
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
			 OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Pre-Era-6: load the MDHA split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: derive the split key in-descriptor (DKP) */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Patch targets of the two MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Pass input data straight through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Pre-Era-6: load the MDHA split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: derive the split key in-descriptor (DKP) */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	/* Patch targets of the two MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Pass input data straight through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * Common AEAD descriptor prologue: start the shared descriptor and load the
 * authentication (class 2) and encryption (class 1) keys, skipping the key
 * loads entirely when the descriptor is already shared. For RFC3686, the
 * nonce is split off the end of the cipher key and parked in CONTEXT1.
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		/* Pre-Era-6: load the MDHA split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: derive the split key in-descriptor (DKP) */
		append_proto_dkp(desc, adata);
	}

	/* Cipher key (nonce excluded for RFC3686) into the class 1 key reg */
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		/* Route the nonce through the output FIFO into CONTEXT1 */
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Load the IV from the input sequence into CONTEXT1 */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		/* Era >= 3 (non-QI): assoclen comes in via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: whether to generate Encrypted Chain IV
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* For geniv, the IV is read later, after the assoc data */
		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		/* geniv: output also carries the (re-encrypted) IV */
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		/* Era >= 3 (non-QI): assoclen comes in via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		/* Load the IV into CONTEXT1, then feed it to class 2 (auth) */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		/* ctx1_iv_off == 0: may need the DK (decrypt-key) variant */
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;
	u32 *wait_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		/* RFC3686: IV comes from the request, not from the RNG */
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move the freshly generated IV into CONTEXT1 */
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		/* Era >= 3 (non-QI): assoclen comes in via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* QI input carries the IV; exclude it from the payload count */
		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/*
	 * jump to ICV writing; the relative offset (in descriptor words)
	 * differs because the QI path has an extra IV FIFO-load below
	 */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	if (is_qi)
		/* Consume the IV even though there is no other input */
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor that decrypts the payload and verifies the
 * GCM ICV in hardware (OP_ALG_ICV_ON). The command order below is the
 * descriptor program itself - do not reorder.
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * NOTE(review): this conditional jump appears to stall the
		 * DECO until the load above has landed in MATH3 - confirm
		 * against the CAAM reference manual's JUMP condition bits.
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* qi carries the IV in the input sequence; consume it here */
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen (REG0 is presumably zero here) */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV - hardware compares it and flags a mismatch as an error */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Input sequence: AAD | PTXT
 * Output sequence: AAD | CTXT | ICV
 * AAD length (assoclen), which includes the IV length, is available in Math3.
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the assoclen load above completes */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/*
		 * Read salt and IV - the 4-byte salt is stored in keymat
		 * immediately after the AES key (key_virt + keylen).
		 */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
				     cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen in REG3 includes ivsize; strip it for the AAD read */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip AAD */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Read cryptlen and set this value into VARSEQOUTLEN */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* If cryptlen is ZERO jump to AAD command */
	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					     JUMP_COND_MATH_Z);

	/* Read AAD data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Jump instructions to avoid double reading of AAD */
	skip_instructions = append_jump(desc, JUMP_TEST_ALL);

	/* There is no input data, cryptlen = 0 */
	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);

	/* Read AAD */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	set_jump_tgt_here(desc, skip_instructions);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Counterpart of cnstr_shdsc_rfc4106_encap: decrypts and verifies the ICV
 * in hardware (OP_ALG_ICV_ON). Command order is the descriptor program.
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the assoclen load above completes */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/*
		 * Read salt and IV - the 4-byte salt sits in keymat right
		 * after the AES key (key_virt + keylen).
		 */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
				     cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen in REG3 includes ivsize; strip it for the AAD read */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV - hardware performs the comparison */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * GMAC authenticates everything (AAD and payload alike), so the whole input
 * is fed in as AAD and the payload is copied to the output via a MOVE; the
 * descriptor self-patches to cope with SEC revisions lacking MOVE_LEN.
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV (salt follows the key in keymat) */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
				     cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	/* Patch targets for the two self-modifying MOVEs above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Counterpart of cnstr_shdsc_rfc4543_encap; lengths come from seqoutlen and
 * the ICV is verified in hardware (OP_ALG_ICV_ON). Uses the same MOVE_LEN
 * self-patching workaround as the encap side.
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV (salt follows the key in keymat) */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
				     cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	/* Patch targets for the two self-modifying MOVEs above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Re-enable info FIFO entries before reading the ICV */
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV - hardware performs the comparison */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/**
 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
 *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
 *                          descriptor (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @adata: pointer to authentication transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @encap: true if encapsulation, false if decapsulation
 * @is_qi: true when called from caam/qi
 *
 * The IPsec variant is detected from @ivsize (anything other than
 * CHACHAPOLY_IV_SIZE); it loads a 4-byte salt from keymat and copies the IV
 * to the output.
 */
void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool encap,
			    const bool is_qi)
{
	u32 *key_jump_cmd, *wait_cmd;
	u32 nfifo;
	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
			  CLASS_1 | KEY_DEST_CLASS_REG);

	/* For IPsec load the salt from keymat in the context register */
	if (is_ipsec)
		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
				   4 << LDST_OFFSET_SHIFT);

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 and 1 operations: Poly & ChaCha */
	if (encap) {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
	} else {
		/* decap verifies the Poly1305 tag in hardware (ICV_ON) */
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	}

	if (is_qi) {
		u32 *wait_load_cmd;
		/* IPsec salt occupies context offset 4..7, so IV goes at 8 */
		u32 ctx1_iv_off = is_ipsec ? 8 : 4;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				4 << LDST_OFFSET_SHIFT);

		/* Stall until the assoclen load above completes */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				ctx1_iv_off << LDST_OFFSET_SHIFT);
	}

	/*
	 * MAGIC with NFIFO
	 * Read associated data from the input and send them to class1 and
	 * class2 alignment blocks. From class1 send data to output fifo and
	 * then write it to memory since we don't need to encrypt AD.
	 */
	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);

	/* VSIL = VSOL = assoclen */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);

	/* IPsec - copy IV at the output */
	if (is_ipsec)
		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
				      0x2 << 25);

	/*
	 * NOTE(review): this jump appears to act as a barrier before the
	 * payload phase - confirm the JUMP_COND_NOP semantics against the
	 * CAAM reference manual.
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
			       JUMP_COND_NOP | JUMP_TEST_ALL);
	set_jump_tgt_here(desc, wait_cmd);

	if (encap) {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

		/* Write ICV */
		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
				 LDST_SRCDST_BYTE_CONTEXT);
	} else {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

		/* Load ICV for verification */
		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
	}

	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
/*
 * For skcipher encrypt and decrypt, read from req->src and write to req->dst.
 * Sets both variable sequence lengths to the remaining input length (REG0 is
 * presumably zero, making the MATH ADD an effective copy of SEQINLEN), then
 * reads the whole payload into class 1 and stores the result.
 */
static inline void skcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	/* variable-length read of the full message, LAST1 closes class 1 */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
  1141. /**
  1142. * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
  1143. * @desc: pointer to buffer used for descriptor construction
  1144. * @cdata: pointer to block cipher transform definitions
  1145. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1146. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
  1147. * - OP_ALG_ALGSEL_CHACHA20
  1148. * @ivsize: initialization vector size
  1149. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1150. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1151. */
  1152. void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
  1153. unsigned int ivsize, const bool is_rfc3686,
  1154. const u32 ctx1_iv_off)
  1155. {
  1156. u32 *key_jump_cmd;
  1157. u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
  1158. bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
  1159. OP_ALG_ALGSEL_CHACHA20);
  1160. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1161. /* Skip if already shared */
  1162. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1163. JUMP_COND_SHRD);
  1164. /* Load class1 key only */
  1165. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1166. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1167. /* Load nonce into CONTEXT1 reg */
  1168. if (is_rfc3686) {
  1169. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1170. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1171. LDST_CLASS_IND_CCB |
  1172. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1173. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1174. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1175. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1176. }
  1177. set_jump_tgt_here(desc, key_jump_cmd);
  1178. /* Load IV, if there is one */
  1179. if (ivsize)
  1180. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1181. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1182. LDST_OFFSET_SHIFT));
  1183. /* Load counter into CONTEXT1 reg */
  1184. if (is_rfc3686)
  1185. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1186. LDST_SRCDST_BYTE_CONTEXT |
  1187. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1188. LDST_OFFSET_SHIFT));
  1189. /* Load operation */
  1190. if (is_chacha20)
  1191. options |= OP_ALG_AS_FINALIZE;
  1192. append_operation(desc, options);
  1193. /* Perform operation */
  1194. skcipher_append_src_dst(desc);
  1195. /* Store IV */
  1196. if (!is_chacha20 && ivsize)
  1197. append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1198. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1199. LDST_OFFSET_SHIFT));
  1200. print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
  1201. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
  1202. 1);
  1203. }
  1204. EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
  1205. /**
  1206. * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
  1207. * @desc: pointer to buffer used for descriptor construction
  1208. * @cdata: pointer to block cipher transform definitions
  1209. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1210. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
  1211. * - OP_ALG_ALGSEL_CHACHA20
  1212. * @ivsize: initialization vector size
  1213. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1214. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1215. */
  1216. void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
  1217. unsigned int ivsize, const bool is_rfc3686,
  1218. const u32 ctx1_iv_off)
  1219. {
  1220. u32 *key_jump_cmd;
  1221. bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
  1222. OP_ALG_ALGSEL_CHACHA20);
  1223. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1224. /* Skip if already shared */
  1225. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1226. JUMP_COND_SHRD);
  1227. /* Load class1 key only */
  1228. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1229. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1230. /* Load nonce into CONTEXT1 reg */
  1231. if (is_rfc3686) {
  1232. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1233. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1234. LDST_CLASS_IND_CCB |
  1235. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1236. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1237. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1238. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1239. }
  1240. set_jump_tgt_here(desc, key_jump_cmd);
  1241. /* Load IV, if there is one */
  1242. if (ivsize)
  1243. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1244. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1245. LDST_OFFSET_SHIFT));
  1246. /* Load counter into CONTEXT1 reg */
  1247. if (is_rfc3686)
  1248. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1249. LDST_SRCDST_BYTE_CONTEXT |
  1250. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1251. LDST_OFFSET_SHIFT));
  1252. /* Choose operation */
  1253. if (ctx1_iv_off)
  1254. append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
  1255. OP_ALG_DECRYPT);
  1256. else
  1257. append_dec_op1(desc, cdata->algtype);
  1258. /* Perform operation */
  1259. skcipher_append_src_dst(desc);
  1260. /* Store IV */
  1261. if (!is_chacha20 && ivsize)
  1262. append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1263. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1264. LDST_OFFSET_SHIFT));
  1265. print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
  1266. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
  1267. 1);
  1268. }
  1269. EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
/**
 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	   Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 *
 * Builds a CAAM shared descriptor that loads the XTS key, loads the 16-byte
 * tweak from the input sequence, encrypts the payload and stores the updated
 * tweak back to the output sequence. The command append order below is the
 * exact program the CAAM hardware executes - do not reorder.
 */
void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	/*
	 * HDR_SAVECTX requests that the class 1 context (which holds the
	 * running tweak) be preserved, so it can be stored back below.
	 */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/*
	 * Skip the key/sector-size load if the descriptor is already shared:
	 * the jump target is patched in by set_jump_tgt_here() below.
	 */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) into class 1 context */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index / 16B tweak value
	 * Lower 8B of IV - sector index / tweak lower half (context 0x20)
	 * Upper 8B of IV - upper half of 16B tweak (context 0x30)
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x30 << LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store lower 8B and upper 8B of IV (updated tweak) back out */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x30 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
			     desc, desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
/**
 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	   Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 *
 * Decrypt counterpart of cnstr_shdsc_xts_skcipher_encap(): identical command
 * sequence except the operation command is built by append_dec_op1(). The
 * command append order below is the exact program the CAAM hardware executes -
 * do not reorder.
 */
void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	/*
	 * HDR_SAVECTX requests that the class 1 context (which holds the
	 * running tweak) be preserved, so it can be stored back below.
	 */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/*
	 * Skip the key/sector-size load if the descriptor is already shared:
	 * the jump target is patched in by set_jump_tgt_here() below.
	 */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) into class 1 context */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index / 16B tweak value
	 * Lower 8B of IV - sector index / tweak lower half (context 0x20)
	 * Upper 8B of IV - upper half of 16B tweak (context 0x30)
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x30 << LDST_OFFSET_SHIFT));

	/* Load operation (decrypt variant) */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store lower 8B and upper 8B of IV (updated tweak) back out */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x30 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
			     desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
/* Kernel module metadata */
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");