zcrypt_ccamisc.c

  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * Copyright IBM Corp. 2019
  4. * Author(s): Harald Freudenberger <[email protected]>
  5. * Ingo Franzki <[email protected]>
  6. *
  7. * Collection of CCA misc functions used by zcrypt and pkey
  8. */
  9. #define KMSG_COMPONENT "zcrypt"
  10. #define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
  11. #include <linux/init.h>
  12. #include <linux/module.h>
  13. #include <linux/slab.h>
  14. #include <linux/random.h>
  15. #include <asm/zcrypt.h>
  16. #include <asm/pkey.h>
  17. #include "ap_bus.h"
  18. #include "zcrypt_api.h"
  19. #include "zcrypt_debug.h"
  20. #include "zcrypt_msgtype6.h"
  21. #include "zcrypt_ccamisc.h"
  22. #define DEBUG_DBG(...) ZCRYPT_DBF(DBF_DEBUG, ##__VA_ARGS__)
  23. #define DEBUG_INFO(...) ZCRYPT_DBF(DBF_INFO, ##__VA_ARGS__)
  24. #define DEBUG_WARN(...) ZCRYPT_DBF(DBF_WARN, ##__VA_ARGS__)
  25. #define DEBUG_ERR(...) ZCRYPT_DBF(DBF_ERR, ##__VA_ARGS__)
  26. /* Size of parameter block used for all cca requests/replies */
  27. #define PARMBSIZE 512
  28. /* Size of vardata block used for some of the cca requests/replies */
  29. #define VARDATASIZE 4096
  30. struct cca_info_list_entry {
  31. struct list_head list;
  32. u16 cardnr;
  33. u16 domain;
  34. struct cca_info info;
  35. };
  36. /* a list with cca_info_list_entry entries */
  37. static LIST_HEAD(cca_info_list);
  38. static DEFINE_SPINLOCK(cca_info_list_lock);
  39. /*
  40. * Simple check if the token is a valid CCA secure AES data key
  41. * token. If keybitsize is given, the bitsize of the key is
  42. * also checked. Returns 0 on success or errno value on failure.
  43. */
  44. int cca_check_secaeskeytoken(debug_info_t *dbg, int dbflvl,
  45. const u8 *token, int keybitsize)
  46. {
  47. struct secaeskeytoken *t = (struct secaeskeytoken *)token;
  48. #define DBF(...) debug_sprintf_event(dbg, dbflvl, ##__VA_ARGS__)
  49. if (t->type != TOKTYPE_CCA_INTERNAL) {
  50. if (dbg)
  51. DBF("%s token check failed, type 0x%02x != 0x%02x\n",
  52. __func__, (int)t->type, TOKTYPE_CCA_INTERNAL);
  53. return -EINVAL;
  54. }
  55. if (t->version != TOKVER_CCA_AES) {
  56. if (dbg)
  57. DBF("%s token check failed, version 0x%02x != 0x%02x\n",
  58. __func__, (int)t->version, TOKVER_CCA_AES);
  59. return -EINVAL;
  60. }
  61. if (keybitsize > 0 && t->bitsize != keybitsize) {
  62. if (dbg)
  63. DBF("%s token check failed, bitsize %d != %d\n",
  64. __func__, (int)t->bitsize, keybitsize);
  65. return -EINVAL;
  66. }
  67. #undef DBF
  68. return 0;
  69. }
  70. EXPORT_SYMBOL(cca_check_secaeskeytoken);
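/*
 * Illustrative usage sketch (not part of the original driver): validate a
 * 256-bit CCA AES DATA secure key token with the check helper above. The
 * NULL debug handle is safe because every DBF() call above is guarded by
 * "if (dbg)"; the token buffer and the 256-bit size are assumptions of
 * this example.
 */
static int __maybe_unused example_check_aes_data_token(const u8 *token)
{
	return cca_check_secaeskeytoken(NULL, 0, token, 256);
}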
  71. /*
  72. * Simple check if the token is a valid CCA secure AES cipher key
  73. * token. If keybitsize is given, the bitsize of the key is
  74. * also checked. If checkcpacfexport is enabled, the key is also
  75. * checked for the export flag to allow CPACF export.
  76. * Returns 0 on success or errno value on failure.
  77. */
  78. int cca_check_secaescipherkey(debug_info_t *dbg, int dbflvl,
  79. const u8 *token, int keybitsize,
  80. int checkcpacfexport)
  81. {
  82. struct cipherkeytoken *t = (struct cipherkeytoken *)token;
  83. bool keybitsizeok = true;
  84. #define DBF(...) debug_sprintf_event(dbg, dbflvl, ##__VA_ARGS__)
  85. if (t->type != TOKTYPE_CCA_INTERNAL) {
  86. if (dbg)
  87. DBF("%s token check failed, type 0x%02x != 0x%02x\n",
  88. __func__, (int)t->type, TOKTYPE_CCA_INTERNAL);
  89. return -EINVAL;
  90. }
  91. if (t->version != TOKVER_CCA_VLSC) {
  92. if (dbg)
  93. DBF("%s token check failed, version 0x%02x != 0x%02x\n",
  94. __func__, (int)t->version, TOKVER_CCA_VLSC);
  95. return -EINVAL;
  96. }
  97. if (t->algtype != 0x02) {
  98. if (dbg)
  99. DBF("%s token check failed, algtype 0x%02x != 0x02\n",
  100. __func__, (int)t->algtype);
  101. return -EINVAL;
  102. }
  103. if (t->keytype != 0x0001) {
  104. if (dbg)
  105. DBF("%s token check failed, keytype 0x%04x != 0x0001\n",
  106. __func__, (int)t->keytype);
  107. return -EINVAL;
  108. }
  109. if (t->plfver != 0x00 && t->plfver != 0x01) {
  110. if (dbg)
  111. DBF("%s token check failed, unknown plfver 0x%02x\n",
  112. __func__, (int)t->plfver);
  113. return -EINVAL;
  114. }
  115. if (t->wpllen != 512 && t->wpllen != 576 && t->wpllen != 640) {
  116. if (dbg)
  117. DBF("%s token check failed, unknown wpllen %d\n",
  118. __func__, (int)t->wpllen);
  119. return -EINVAL;
  120. }
  121. if (keybitsize > 0) {
  122. switch (keybitsize) {
  123. case 128:
  124. if (t->wpllen != (t->plfver ? 640 : 512))
  125. keybitsizeok = false;
  126. break;
  127. case 192:
  128. if (t->wpllen != (t->plfver ? 640 : 576))
  129. keybitsizeok = false;
  130. break;
  131. case 256:
  132. if (t->wpllen != 640)
  133. keybitsizeok = false;
  134. break;
  135. default:
  136. keybitsizeok = false;
  137. break;
  138. }
  139. if (!keybitsizeok) {
  140. if (dbg)
  141. DBF("%s token check failed, bitsize %d\n",
  142. __func__, keybitsize);
  143. return -EINVAL;
  144. }
  145. }
  146. if (checkcpacfexport && !(t->kmf1 & KMF1_XPRT_CPAC)) {
  147. if (dbg)
  148. DBF("%s token check failed, XPRT_CPAC bit is 0\n",
  149. __func__);
  150. return -EINVAL;
  151. }
  152. #undef DBF
  153. return 0;
  154. }
  155. EXPORT_SYMBOL(cca_check_secaescipherkey);
  156. /*
  157. * Simple check if the token is a valid CCA secure ECC private
  158. * key token. Returns 0 on success or errno value on failure.
  159. */
  160. int cca_check_sececckeytoken(debug_info_t *dbg, int dbflvl,
  161. const u8 *token, size_t keysize,
  162. int checkcpacfexport)
  163. {
  164. struct eccprivkeytoken *t = (struct eccprivkeytoken *)token;
  165. #define DBF(...) debug_sprintf_event(dbg, dbflvl, ##__VA_ARGS__)
  166. if (t->type != TOKTYPE_CCA_INTERNAL_PKA) {
  167. if (dbg)
  168. DBF("%s token check failed, type 0x%02x != 0x%02x\n",
  169. __func__, (int)t->type, TOKTYPE_CCA_INTERNAL_PKA);
  170. return -EINVAL;
  171. }
  172. if (t->len > keysize) {
  173. if (dbg)
  174. DBF("%s token check failed, len %d > keysize %zu\n",
  175. __func__, (int)t->len, keysize);
  176. return -EINVAL;
  177. }
  178. if (t->secid != 0x20) {
  179. if (dbg)
  180. DBF("%s token check failed, secid 0x%02x != 0x20\n",
  181. __func__, (int)t->secid);
  182. return -EINVAL;
  183. }
  184. if (checkcpacfexport && !(t->kutc & 0x01)) {
  185. if (dbg)
  186. DBF("%s token check failed, XPRTCPAC bit is 0\n",
  187. __func__);
  188. return -EINVAL;
  189. }
  190. #undef DBF
  191. return 0;
  192. }
  193. EXPORT_SYMBOL(cca_check_sececckeytoken);
  194. /*
  195. * Allocate consecutive memory for request CPRB, request param
  196. * block, reply CPRB and reply param block and fill in values
  197. * for the common fields. Returns 0 on success or errno value
  198. * on failure.
  199. */
  200. static int alloc_and_prep_cprbmem(size_t paramblen,
  201. u8 **p_cprb_mem,
  202. struct CPRBX **p_req_cprb,
  203. struct CPRBX **p_rep_cprb)
  204. {
  205. u8 *cprbmem;
  206. size_t cprbplusparamblen = sizeof(struct CPRBX) + paramblen;
  207. struct CPRBX *preqcblk, *prepcblk;
  208. /*
  209. * allocate consecutive memory for request CPRB, request param
  210. * block, reply CPRB and reply param block
  211. */
  212. cprbmem = kcalloc(2, cprbplusparamblen, GFP_KERNEL);
  213. if (!cprbmem)
  214. return -ENOMEM;
  215. preqcblk = (struct CPRBX *)cprbmem;
  216. prepcblk = (struct CPRBX *)(cprbmem + cprbplusparamblen);
  217. /* fill request cprb struct */
  218. preqcblk->cprb_len = sizeof(struct CPRBX);
  219. preqcblk->cprb_ver_id = 0x02;
  220. memcpy(preqcblk->func_id, "T2", 2);
  221. preqcblk->rpl_msgbl = cprbplusparamblen;
  222. if (paramblen) {
  223. preqcblk->req_parmb =
  224. ((u8 __user *)preqcblk) + sizeof(struct CPRBX);
  225. preqcblk->rpl_parmb =
  226. ((u8 __user *)prepcblk) + sizeof(struct CPRBX);
  227. }
  228. *p_cprb_mem = cprbmem;
  229. *p_req_cprb = preqcblk;
  230. *p_rep_cprb = prepcblk;
  231. return 0;
  232. }
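/*
 * Layout sketch of the buffer prepared by alloc_and_prep_cprbmem() (sizes
 * are symbolic, not measured):
 *
 *  cprbmem
 *  +----------------+---------------------+----------------+---------------------+
 *  | request CPRBX  | request param block | reply CPRBX    | reply param block   |
 *  | sizeof(CPRBX)  | paramblen bytes     | sizeof(CPRBX)  | paramblen bytes     |
 *  +----------------+---------------------+----------------+---------------------+
 *  ^ preqcblk                             ^ prepcblk
 */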
  233. /*
  234. * Free the cprb memory allocated with the function above.
  235. * If the scrub value is not zero, the memory is filled
  236. * with zeros before freeing (useful if there was some
  237. * clear key material in there).
  238. */
  239. static void free_cprbmem(void *mem, size_t paramblen, int scrub)
  240. {
  241. if (scrub)
  242. memzero_explicit(mem, 2 * (sizeof(struct CPRBX) + paramblen));
  243. kfree(mem);
  244. }
  245. /*
  246. * Helper function to prepare the xcrb struct
  247. */
  248. static inline void prep_xcrb(struct ica_xcRB *pxcrb,
  249. u16 cardnr,
  250. struct CPRBX *preqcblk,
  251. struct CPRBX *prepcblk)
  252. {
  253. memset(pxcrb, 0, sizeof(*pxcrb));
  254. pxcrb->agent_ID = 0x4341; /* 'CA' */
  255. pxcrb->user_defined = (cardnr == 0xFFFF ? AUTOSELECT : cardnr);
  256. pxcrb->request_control_blk_length =
  257. preqcblk->cprb_len + preqcblk->req_parml;
  258. pxcrb->request_control_blk_addr = (void __user *)preqcblk;
  259. pxcrb->reply_control_blk_length = preqcblk->rpl_msgbl;
  260. pxcrb->reply_control_blk_addr = (void __user *)prepcblk;
  261. }
  262. /*
  263. * Generate (random) CCA AES DATA secure key.
  264. */
  265. int cca_genseckey(u16 cardnr, u16 domain,
  266. u32 keybitsize, u8 *seckey)
  267. {
  268. int i, rc, keysize;
  269. int seckeysize;
  270. u8 *mem, *ptr;
  271. struct CPRBX *preqcblk, *prepcblk;
  272. struct ica_xcRB xcrb;
  273. struct kgreqparm {
  274. u8 subfunc_code[2];
  275. u16 rule_array_len;
  276. struct lv1 {
  277. u16 len;
  278. char key_form[8];
  279. char key_length[8];
  280. char key_type1[8];
  281. char key_type2[8];
  282. } lv1;
  283. struct lv2 {
  284. u16 len;
  285. struct keyid {
  286. u16 len;
  287. u16 attr;
  288. u8 data[SECKEYBLOBSIZE];
  289. } keyid[6];
  290. } lv2;
  291. } __packed * preqparm;
  292. struct kgrepparm {
  293. u8 subfunc_code[2];
  294. u16 rule_array_len;
  295. struct lv3 {
  296. u16 len;
  297. u16 keyblocklen;
  298. struct {
  299. u16 toklen;
  300. u16 tokattr;
  301. u8 tok[];
  302. /* ... some more data ... */
  303. } keyblock;
  304. } lv3;
  305. } __packed * prepparm;
  306. /* get already prepared memory for 2 cprbs with param block each */
  307. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  308. if (rc)
  309. return rc;
  310. /* fill request cprb struct */
  311. preqcblk->domain = domain;
  312. /* fill request cprb param block with KG request */
  313. preqparm = (struct kgreqparm __force *)preqcblk->req_parmb;
  314. memcpy(preqparm->subfunc_code, "KG", 2);
  315. preqparm->rule_array_len = sizeof(preqparm->rule_array_len);
  316. preqparm->lv1.len = sizeof(struct lv1);
  317. memcpy(preqparm->lv1.key_form, "OP ", 8);
  318. switch (keybitsize) {
  319. case PKEY_SIZE_AES_128:
  320. case PKEY_KEYTYPE_AES_128: /* older ioctls used this */
  321. keysize = 16;
  322. memcpy(preqparm->lv1.key_length, "KEYLN16 ", 8);
  323. break;
  324. case PKEY_SIZE_AES_192:
  325. case PKEY_KEYTYPE_AES_192: /* older ioctls used this */
  326. keysize = 24;
  327. memcpy(preqparm->lv1.key_length, "KEYLN24 ", 8);
  328. break;
  329. case PKEY_SIZE_AES_256:
  330. case PKEY_KEYTYPE_AES_256: /* older ioctls used this */
  331. keysize = 32;
  332. memcpy(preqparm->lv1.key_length, "KEYLN32 ", 8);
  333. break;
  334. default:
  335. DEBUG_ERR("%s unknown/unsupported keybitsize %d\n",
  336. __func__, keybitsize);
  337. rc = -EINVAL;
  338. goto out;
  339. }
  340. memcpy(preqparm->lv1.key_type1, "AESDATA ", 8);
  341. preqparm->lv2.len = sizeof(struct lv2);
  342. for (i = 0; i < 6; i++) {
  343. preqparm->lv2.keyid[i].len = sizeof(struct keyid);
  344. preqparm->lv2.keyid[i].attr = (i == 2 ? 0x30 : 0x10);
  345. }
  346. preqcblk->req_parml = sizeof(struct kgreqparm);
  347. /* fill xcrb struct */
  348. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  349. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  350. rc = zcrypt_send_cprb(&xcrb);
  351. if (rc) {
  352. DEBUG_ERR("%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, errno %d\n",
  353. __func__, (int)cardnr, (int)domain, rc);
  354. goto out;
  355. }
  356. /* check response returncode and reasoncode */
  357. if (prepcblk->ccp_rtcode != 0) {
  358. DEBUG_ERR("%s secure key generate failure, card response %d/%d\n",
  359. __func__,
  360. (int)prepcblk->ccp_rtcode,
  361. (int)prepcblk->ccp_rscode);
  362. rc = -EIO;
  363. goto out;
  364. }
  365. /* process response cprb param block */
  366. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  367. prepcblk->rpl_parmb = (u8 __user *)ptr;
  368. prepparm = (struct kgrepparm *)ptr;
  369. /* check length of the returned secure key token */
  370. seckeysize = prepparm->lv3.keyblock.toklen
  371. - sizeof(prepparm->lv3.keyblock.toklen)
  372. - sizeof(prepparm->lv3.keyblock.tokattr);
  373. if (seckeysize != SECKEYBLOBSIZE) {
  374. DEBUG_ERR("%s secure token size mismatch %d != %d bytes\n",
  375. __func__, seckeysize, SECKEYBLOBSIZE);
  376. rc = -EIO;
  377. goto out;
  378. }
  379. /* check secure key token */
  380. rc = cca_check_secaeskeytoken(zcrypt_dbf_info, DBF_ERR,
  381. prepparm->lv3.keyblock.tok, 8 * keysize);
  382. if (rc) {
  383. rc = -EIO;
  384. goto out;
  385. }
  386. /* copy the generated secure key token */
  387. memcpy(seckey, prepparm->lv3.keyblock.tok, SECKEYBLOBSIZE);
  388. out:
  389. free_cprbmem(mem, PARMBSIZE, 0);
  390. return rc;
  391. }
  392. EXPORT_SYMBOL(cca_genseckey);
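/*
 * Illustrative usage sketch (not part of the original driver): generate a
 * random 256-bit AES DATA secure key. Card 2 / domain 35 are assumed example
 * values; the destination buffer must provide SECKEYBLOBSIZE bytes.
 */
static int __maybe_unused example_gen_aes_data_seckey(u8 seckey[SECKEYBLOBSIZE])
{
	return cca_genseckey(2, 35, PKEY_SIZE_AES_256, seckey);
}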
  393. /*
  394. * Generate a CCA AES DATA secure key with a given clear key value.
  395. */
  396. int cca_clr2seckey(u16 cardnr, u16 domain, u32 keybitsize,
  397. const u8 *clrkey, u8 *seckey)
  398. {
  399. int rc, keysize, seckeysize;
  400. u8 *mem, *ptr;
  401. struct CPRBX *preqcblk, *prepcblk;
  402. struct ica_xcRB xcrb;
  403. struct cmreqparm {
  404. u8 subfunc_code[2];
  405. u16 rule_array_len;
  406. char rule_array[8];
  407. struct lv1 {
  408. u16 len;
  409. u8 clrkey[0];
  410. } lv1;
  411. struct lv2 {
  412. u16 len;
  413. struct keyid {
  414. u16 len;
  415. u16 attr;
  416. u8 data[SECKEYBLOBSIZE];
  417. } keyid;
  418. } lv2;
  419. } __packed * preqparm;
  420. struct lv2 *plv2;
  421. struct cmrepparm {
  422. u8 subfunc_code[2];
  423. u16 rule_array_len;
  424. struct lv3 {
  425. u16 len;
  426. u16 keyblocklen;
  427. struct {
  428. u16 toklen;
  429. u16 tokattr;
  430. u8 tok[];
  431. /* ... some more data ... */
  432. } keyblock;
  433. } lv3;
  434. } __packed * prepparm;
  435. /* get already prepared memory for 2 cprbs with param block each */
  436. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  437. if (rc)
  438. return rc;
  439. /* fill request cprb struct */
  440. preqcblk->domain = domain;
  441. /* fill request cprb param block with CM request */
  442. preqparm = (struct cmreqparm __force *)preqcblk->req_parmb;
  443. memcpy(preqparm->subfunc_code, "CM", 2);
  444. memcpy(preqparm->rule_array, "AES ", 8);
  445. preqparm->rule_array_len =
  446. sizeof(preqparm->rule_array_len) + sizeof(preqparm->rule_array);
  447. switch (keybitsize) {
  448. case PKEY_SIZE_AES_128:
  449. case PKEY_KEYTYPE_AES_128: /* older ioctls used this */
  450. keysize = 16;
  451. break;
  452. case PKEY_SIZE_AES_192:
  453. case PKEY_KEYTYPE_AES_192: /* older ioctls used this */
  454. keysize = 24;
  455. break;
  456. case PKEY_SIZE_AES_256:
  457. case PKEY_KEYTYPE_AES_256: /* older ioctls used this */
  458. keysize = 32;
  459. break;
  460. default:
  461. DEBUG_ERR("%s unknown/unsupported keybitsize %d\n",
  462. __func__, keybitsize);
  463. rc = -EINVAL;
  464. goto out;
  465. }
  466. preqparm->lv1.len = sizeof(struct lv1) + keysize;
  467. memcpy(preqparm->lv1.clrkey, clrkey, keysize);
  468. plv2 = (struct lv2 *)(((u8 *)&preqparm->lv2) + keysize);
  469. plv2->len = sizeof(struct lv2);
  470. plv2->keyid.len = sizeof(struct keyid);
  471. plv2->keyid.attr = 0x30;
  472. preqcblk->req_parml = sizeof(struct cmreqparm) + keysize;
  473. /* fill xcrb struct */
  474. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  475. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  476. rc = zcrypt_send_cprb(&xcrb);
  477. if (rc) {
  478. DEBUG_ERR("%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  479. __func__, (int)cardnr, (int)domain, rc);
  480. goto out;
  481. }
  482. /* check response returncode and reasoncode */
  483. if (prepcblk->ccp_rtcode != 0) {
  484. DEBUG_ERR("%s clear key import failure, card response %d/%d\n",
  485. __func__,
  486. (int)prepcblk->ccp_rtcode,
  487. (int)prepcblk->ccp_rscode);
  488. rc = -EIO;
  489. goto out;
  490. }
  491. /* process response cprb param block */
  492. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  493. prepcblk->rpl_parmb = (u8 __user *)ptr;
  494. prepparm = (struct cmrepparm *)ptr;
  495. /* check length of the returned secure key token */
  496. seckeysize = prepparm->lv3.keyblock.toklen
  497. - sizeof(prepparm->lv3.keyblock.toklen)
  498. - sizeof(prepparm->lv3.keyblock.tokattr);
  499. if (seckeysize != SECKEYBLOBSIZE) {
  500. DEBUG_ERR("%s secure token size mismatch %d != %d bytes\n",
  501. __func__, seckeysize, SECKEYBLOBSIZE);
  502. rc = -EIO;
  503. goto out;
  504. }
  505. /* check secure key token */
  506. rc = cca_check_secaeskeytoken(zcrypt_dbf_info, DBF_ERR,
  507. prepparm->lv3.keyblock.tok, 8 * keysize);
  508. if (rc) {
  509. rc = -EIO;
  510. goto out;
  511. }
  512. /* copy the generated secure key token */
  513. if (seckey)
  514. memcpy(seckey, prepparm->lv3.keyblock.tok, SECKEYBLOBSIZE);
  515. out:
  516. free_cprbmem(mem, PARMBSIZE, 1);
  517. return rc;
  518. }
  519. EXPORT_SYMBOL(cca_clr2seckey);
  520. /*
  521. * Derive protected key from a CCA AES DATA secure key.
  522. */
  523. int cca_sec2protkey(u16 cardnr, u16 domain,
  524. const u8 *seckey, u8 *protkey, u32 *protkeylen,
  525. u32 *protkeytype)
  526. {
  527. int rc;
  528. u8 *mem, *ptr;
  529. struct CPRBX *preqcblk, *prepcblk;
  530. struct ica_xcRB xcrb;
  531. struct uskreqparm {
  532. u8 subfunc_code[2];
  533. u16 rule_array_len;
  534. struct lv1 {
  535. u16 len;
  536. u16 attr_len;
  537. u16 attr_flags;
  538. } lv1;
  539. struct lv2 {
  540. u16 len;
  541. u16 attr_len;
  542. u16 attr_flags;
  543. u8 token[]; /* cca secure key token */
  544. } lv2;
  545. } __packed * preqparm;
  546. struct uskrepparm {
  547. u8 subfunc_code[2];
  548. u16 rule_array_len;
  549. struct lv3 {
  550. u16 len;
  551. u16 attr_len;
  552. u16 attr_flags;
  553. struct cpacfkeyblock {
  554. u8 version; /* version of this struct */
  555. u8 flags[2];
  556. u8 algo;
  557. u8 form;
  558. u8 pad1[3];
  559. u16 len;
  560. u8 key[64]; /* the key (len bytes) */
  561. u16 keyattrlen;
  562. u8 keyattr[32];
  563. u8 pad2[1];
  564. u8 vptype;
  565. u8 vp[32]; /* verification pattern */
  566. } ckb;
  567. } lv3;
  568. } __packed * prepparm;
  569. /* get already prepared memory for 2 cprbs with param block each */
  570. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  571. if (rc)
  572. return rc;
  573. /* fill request cprb struct */
  574. preqcblk->domain = domain;
  575. /* fill request cprb param block with USK request */
  576. preqparm = (struct uskreqparm __force *)preqcblk->req_parmb;
  577. memcpy(preqparm->subfunc_code, "US", 2);
  578. preqparm->rule_array_len = sizeof(preqparm->rule_array_len);
  579. preqparm->lv1.len = sizeof(struct lv1);
  580. preqparm->lv1.attr_len = sizeof(struct lv1) - sizeof(preqparm->lv1.len);
  581. preqparm->lv1.attr_flags = 0x0001;
  582. preqparm->lv2.len = sizeof(struct lv2) + SECKEYBLOBSIZE;
  583. preqparm->lv2.attr_len = sizeof(struct lv2)
  584. - sizeof(preqparm->lv2.len) + SECKEYBLOBSIZE;
  585. preqparm->lv2.attr_flags = 0x0000;
  586. memcpy(preqparm->lv2.token, seckey, SECKEYBLOBSIZE);
  587. preqcblk->req_parml = sizeof(struct uskreqparm) + SECKEYBLOBSIZE;
  588. /* fill xcrb struct */
  589. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  590. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  591. rc = zcrypt_send_cprb(&xcrb);
  592. if (rc) {
  593. DEBUG_ERR("%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  594. __func__, (int)cardnr, (int)domain, rc);
  595. goto out;
  596. }
  597. /* check response returncode and reasoncode */
  598. if (prepcblk->ccp_rtcode != 0) {
  599. DEBUG_ERR("%s unwrap secure key failure, card response %d/%d\n",
  600. __func__,
  601. (int)prepcblk->ccp_rtcode,
  602. (int)prepcblk->ccp_rscode);
  603. if (prepcblk->ccp_rtcode == 8 && prepcblk->ccp_rscode == 2290)
  604. rc = -EAGAIN;
  605. else
  606. rc = -EIO;
  607. goto out;
  608. }
  609. if (prepcblk->ccp_rscode != 0) {
  610. DEBUG_WARN("%s unwrap secure key warning, card response %d/%d\n",
  611. __func__,
  612. (int)prepcblk->ccp_rtcode,
  613. (int)prepcblk->ccp_rscode);
  614. }
  615. /* process response cprb param block */
  616. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  617. prepcblk->rpl_parmb = (u8 __user *)ptr;
  618. prepparm = (struct uskrepparm *)ptr;
  619. /* check the returned keyblock */
  620. if (prepparm->lv3.ckb.version != 0x01 &&
  621. prepparm->lv3.ckb.version != 0x02) {
  622. DEBUG_ERR("%s reply param keyblock version mismatch 0x%02x\n",
  623. __func__, (int)prepparm->lv3.ckb.version);
  624. rc = -EIO;
  625. goto out;
  626. }
  627. /* copy the translated protected key */
  628. switch (prepparm->lv3.ckb.len) {
  629. case 16 + 32:
  630. /* AES 128 protected key */
  631. if (protkeytype)
  632. *protkeytype = PKEY_KEYTYPE_AES_128;
  633. break;
  634. case 24 + 32:
  635. /* AES 192 protected key */
  636. if (protkeytype)
  637. *protkeytype = PKEY_KEYTYPE_AES_192;
  638. break;
  639. case 32 + 32:
  640. /* AES 256 protected key */
  641. if (protkeytype)
  642. *protkeytype = PKEY_KEYTYPE_AES_256;
  643. break;
  644. default:
  645. DEBUG_ERR("%s unknown/unsupported keylen %d\n",
  646. __func__, prepparm->lv3.ckb.len);
  647. rc = -EIO;
  648. goto out;
  649. }
  650. memcpy(protkey, prepparm->lv3.ckb.key, prepparm->lv3.ckb.len);
  651. if (protkeylen)
  652. *protkeylen = prepparm->lv3.ckb.len;
  653. out:
  654. free_cprbmem(mem, PARMBSIZE, 0);
  655. return rc;
  656. }
  657. EXPORT_SYMBOL(cca_sec2protkey);
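/*
 * Illustrative usage sketch (not part of the original driver): derive a CPACF
 * protected key from an AES DATA secure key. The 64-byte buffer matches the
 * largest ckb.key length copied above (32 + 32); card 2 / domain 35 are
 * assumed example values.
 */
static int __maybe_unused example_seckey_to_protkey(const u8 *seckey)
{
	u8 protkey[64];
	u32 protkeylen, protkeytype;

	return cca_sec2protkey(2, 35, seckey, protkey,
			       &protkeylen, &protkeytype);
}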
  658. /*
  659. * AES cipher key skeleton created with CSNBKTB2 with these flags:
  660. * INTERNAL, NO-KEY, AES, CIPHER, ANY-MODE, NOEX-SYM, NOEXAASY,
  661. * NOEXUASY, XPRTCPAC, NOEX-RAW, NOEX-DES, NOEX-AES, NOEX-RSA
  662. * used by cca_gencipherkey() and cca_clr2cipherkey().
  663. */
  664. static const u8 aes_cipher_key_skeleton[] = {
  665. 0x01, 0x00, 0x00, 0x38, 0x05, 0x00, 0x00, 0x00,
  666. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  667. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  668. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
  669. 0x00, 0x1a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  670. 0x00, 0x02, 0x00, 0x01, 0x02, 0xc0, 0x00, 0xff,
  671. 0x00, 0x03, 0x08, 0xc8, 0x00, 0x00, 0x00, 0x00 };
  672. #define SIZEOF_SKELETON (sizeof(aes_cipher_key_skeleton))
  673. /*
  674. * Generate (random) CCA AES CIPHER secure key.
  675. */
  676. int cca_gencipherkey(u16 cardnr, u16 domain, u32 keybitsize, u32 keygenflags,
  677. u8 *keybuf, size_t *keybufsize)
  678. {
  679. int rc;
  680. u8 *mem, *ptr;
  681. struct CPRBX *preqcblk, *prepcblk;
  682. struct ica_xcRB xcrb;
  683. struct gkreqparm {
  684. u8 subfunc_code[2];
  685. u16 rule_array_len;
  686. char rule_array[2 * 8];
  687. struct {
  688. u16 len;
  689. u8 key_type_1[8];
  690. u8 key_type_2[8];
  691. u16 clear_key_bit_len;
  692. u16 key_name_1_len;
  693. u16 key_name_2_len;
  694. u16 user_data_1_len;
  695. u16 user_data_2_len;
  696. u8 key_name_1[0];
  697. u8 key_name_2[0];
  698. u8 user_data_1[0];
  699. u8 user_data_2[0];
  700. } vud;
  701. struct {
  702. u16 len;
  703. struct {
  704. u16 len;
  705. u16 flag;
  706. u8 kek_id_1[0];
  707. } tlv1;
  708. struct {
  709. u16 len;
  710. u16 flag;
  711. u8 kek_id_2[0];
  712. } tlv2;
  713. struct {
  714. u16 len;
  715. u16 flag;
  716. u8 gen_key_id_1[SIZEOF_SKELETON];
  717. } tlv3;
  718. struct {
  719. u16 len;
  720. u16 flag;
  721. u8 gen_key_id_1_label[0];
  722. } tlv4;
  723. struct {
  724. u16 len;
  725. u16 flag;
  726. u8 gen_key_id_2[0];
  727. } tlv5;
  728. struct {
  729. u16 len;
  730. u16 flag;
  731. u8 gen_key_id_2_label[0];
  732. } tlv6;
  733. } kb;
  734. } __packed * preqparm;
  735. struct gkrepparm {
  736. u8 subfunc_code[2];
  737. u16 rule_array_len;
  738. struct {
  739. u16 len;
  740. } vud;
  741. struct {
  742. u16 len;
  743. struct {
  744. u16 len;
  745. u16 flag;
  746. u8 gen_key[0]; /* 120-136 bytes */
  747. } tlv1;
  748. } kb;
  749. } __packed * prepparm;
  750. struct cipherkeytoken *t;
  751. /* get already prepared memory for 2 cprbs with param block each */
  752. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  753. if (rc)
  754. return rc;
  755. /* fill request cprb struct */
  756. preqcblk->domain = domain;
  757. preqcblk->req_parml = sizeof(struct gkreqparm);
  758. /* prepare request param block with GK request */
  759. preqparm = (struct gkreqparm __force *)preqcblk->req_parmb;
  760. memcpy(preqparm->subfunc_code, "GK", 2);
  761. preqparm->rule_array_len = sizeof(uint16_t) + 2 * 8;
  762. memcpy(preqparm->rule_array, "AES OP ", 2 * 8);
  763. /* prepare vud block */
  764. preqparm->vud.len = sizeof(preqparm->vud);
  765. switch (keybitsize) {
  766. case 128:
  767. case 192:
  768. case 256:
  769. break;
  770. default:
  771. DEBUG_ERR(
  772. "%s unknown/unsupported keybitsize %d\n",
  773. __func__, keybitsize);
  774. rc = -EINVAL;
  775. goto out;
  776. }
  777. preqparm->vud.clear_key_bit_len = keybitsize;
  778. memcpy(preqparm->vud.key_type_1, "TOKEN ", 8);
  779. memset(preqparm->vud.key_type_2, ' ', sizeof(preqparm->vud.key_type_2));
  780. /* prepare kb block */
  781. preqparm->kb.len = sizeof(preqparm->kb);
  782. preqparm->kb.tlv1.len = sizeof(preqparm->kb.tlv1);
  783. preqparm->kb.tlv1.flag = 0x0030;
  784. preqparm->kb.tlv2.len = sizeof(preqparm->kb.tlv2);
  785. preqparm->kb.tlv2.flag = 0x0030;
  786. preqparm->kb.tlv3.len = sizeof(preqparm->kb.tlv3);
  787. preqparm->kb.tlv3.flag = 0x0030;
  788. memcpy(preqparm->kb.tlv3.gen_key_id_1,
  789. aes_cipher_key_skeleton, SIZEOF_SKELETON);
  790. preqparm->kb.tlv4.len = sizeof(preqparm->kb.tlv4);
  791. preqparm->kb.tlv4.flag = 0x0030;
  792. preqparm->kb.tlv5.len = sizeof(preqparm->kb.tlv5);
  793. preqparm->kb.tlv5.flag = 0x0030;
  794. preqparm->kb.tlv6.len = sizeof(preqparm->kb.tlv6);
  795. preqparm->kb.tlv6.flag = 0x0030;
  796. /* patch the skeleton key token export flags inside the kb block */
  797. if (keygenflags) {
  798. t = (struct cipherkeytoken *)preqparm->kb.tlv3.gen_key_id_1;
  799. t->kmf1 |= (u16)(keygenflags & 0x0000FF00);
  800. t->kmf1 &= (u16)~(keygenflags & 0x000000FF);
  801. }
  802. /* prepare xcrb struct */
  803. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  804. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  805. rc = zcrypt_send_cprb(&xcrb);
  806. if (rc) {
  807. DEBUG_ERR(
  808. "%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  809. __func__, (int)cardnr, (int)domain, rc);
  810. goto out;
  811. }
  812. /* check response returncode and reasoncode */
  813. if (prepcblk->ccp_rtcode != 0) {
  814. DEBUG_ERR(
  815. "%s cipher key generate failure, card response %d/%d\n",
  816. __func__,
  817. (int)prepcblk->ccp_rtcode,
  818. (int)prepcblk->ccp_rscode);
  819. rc = -EIO;
  820. goto out;
  821. }
  822. /* process response cprb param block */
  823. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  824. prepcblk->rpl_parmb = (u8 __user *)ptr;
  825. prepparm = (struct gkrepparm *)ptr;
  826. /* do some plausibility checks on the key block */
  827. if (prepparm->kb.len < 120 + 5 * sizeof(uint16_t) ||
  828. prepparm->kb.len > 136 + 5 * sizeof(uint16_t)) {
  829. DEBUG_ERR("%s reply with invalid or unknown key block\n",
  830. __func__);
  831. rc = -EIO;
  832. goto out;
  833. }
  834. /* and some checks on the generated key */
  835. rc = cca_check_secaescipherkey(zcrypt_dbf_info, DBF_ERR,
  836. prepparm->kb.tlv1.gen_key,
  837. keybitsize, 1);
  838. if (rc) {
  839. rc = -EIO;
  840. goto out;
  841. }
  842. /* copy the generated vlsc key token */
  843. t = (struct cipherkeytoken *)prepparm->kb.tlv1.gen_key;
  844. if (keybuf) {
  845. if (*keybufsize >= t->len)
  846. memcpy(keybuf, t, t->len);
  847. else
  848. rc = -EINVAL;
  849. }
  850. *keybufsize = t->len;
  851. out:
  852. free_cprbmem(mem, PARMBSIZE, 0);
  853. return rc;
  854. }
  855. EXPORT_SYMBOL(cca_gencipherkey);
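/*
 * Illustrative usage sketch (not part of the original driver): generate a
 * random 256-bit AES CIPHER key into a caller buffer. On entry *keybufsize
 * holds the buffer size, on exit the actual token length; keygenflags 0
 * keeps the export flags of the built-in skeleton unchanged. Card 2 /
 * domain 35 are assumed example values.
 */
static int __maybe_unused example_gen_aes_cipher_key(u8 *keybuf,
						     size_t *keybufsize)
{
	return cca_gencipherkey(2, 35, 256, 0, keybuf, keybufsize);
}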
  856. /*
  857. * Helper function, builds and sends a CSNBKPI2 CPRB.
  858. */
  859. static int _ip_cprb_helper(u16 cardnr, u16 domain,
  860. const char *rule_array_1,
  861. const char *rule_array_2,
  862. const char *rule_array_3,
  863. const u8 *clr_key_value,
  864. int clr_key_bit_size,
  865. u8 *key_token,
  866. int *key_token_size)
  867. {
  868. int rc, n;
  869. u8 *mem, *ptr;
  870. struct CPRBX *preqcblk, *prepcblk;
  871. struct ica_xcRB xcrb;
  872. struct rule_array_block {
  873. u8 subfunc_code[2];
  874. u16 rule_array_len;
  875. char rule_array[0];
  876. } __packed * preq_ra_block;
  877. struct vud_block {
  878. u16 len;
  879. struct {
  880. u16 len;
  881. u16 flag; /* 0x0064 */
  882. u16 clr_key_bit_len;
  883. } tlv1;
  884. struct {
  885. u16 len;
  886. u16 flag; /* 0x0063 */
  887. u8 clr_key[0]; /* clear key value bytes */
  888. } tlv2;
  889. } __packed * preq_vud_block;
  890. struct key_block {
  891. u16 len;
  892. struct {
  893. u16 len;
  894. u16 flag; /* 0x0030 */
  895. u8 key_token[0]; /* key skeleton */
  896. } tlv1;
  897. } __packed * preq_key_block;
  898. struct iprepparm {
  899. u8 subfunc_code[2];
  900. u16 rule_array_len;
  901. struct {
  902. u16 len;
  903. } vud;
  904. struct {
  905. u16 len;
  906. struct {
  907. u16 len;
  908. u16 flag; /* 0x0030 */
  909. u8 key_token[0]; /* key token */
  910. } tlv1;
  911. } kb;
  912. } __packed * prepparm;
  913. struct cipherkeytoken *t;
  914. int complete = strncmp(rule_array_2, "COMPLETE", 8) ? 0 : 1;
  915. /* get already prepared memory for 2 cprbs with param block each */
  916. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  917. if (rc)
  918. return rc;
  919. /* fill request cprb struct */
  920. preqcblk->domain = domain;
  921. preqcblk->req_parml = 0;
  922. /* prepare request param block with IP request */
  923. preq_ra_block = (struct rule_array_block __force *)preqcblk->req_parmb;
  924. memcpy(preq_ra_block->subfunc_code, "IP", 2);
  925. preq_ra_block->rule_array_len = sizeof(uint16_t) + 2 * 8;
  926. memcpy(preq_ra_block->rule_array, rule_array_1, 8);
  927. memcpy(preq_ra_block->rule_array + 8, rule_array_2, 8);
  928. preqcblk->req_parml = sizeof(struct rule_array_block) + 2 * 8;
  929. if (rule_array_3) {
  930. preq_ra_block->rule_array_len += 8;
  931. memcpy(preq_ra_block->rule_array + 16, rule_array_3, 8);
  932. preqcblk->req_parml += 8;
  933. }
  934. /* prepare vud block */
  935. preq_vud_block = (struct vud_block __force *)
  936. (preqcblk->req_parmb + preqcblk->req_parml);
  937. n = complete ? 0 : (clr_key_bit_size + 7) / 8;
  938. preq_vud_block->len = sizeof(struct vud_block) + n;
  939. preq_vud_block->tlv1.len = sizeof(preq_vud_block->tlv1);
  940. preq_vud_block->tlv1.flag = 0x0064;
  941. preq_vud_block->tlv1.clr_key_bit_len = complete ? 0 : clr_key_bit_size;
  942. preq_vud_block->tlv2.len = sizeof(preq_vud_block->tlv2) + n;
  943. preq_vud_block->tlv2.flag = 0x0063;
  944. if (!complete)
  945. memcpy(preq_vud_block->tlv2.clr_key, clr_key_value, n);
  946. preqcblk->req_parml += preq_vud_block->len;
  947. /* prepare key block */
  948. preq_key_block = (struct key_block __force *)
  949. (preqcblk->req_parmb + preqcblk->req_parml);
  950. n = *key_token_size;
  951. preq_key_block->len = sizeof(struct key_block) + n;
  952. preq_key_block->tlv1.len = sizeof(preq_key_block->tlv1) + n;
  953. preq_key_block->tlv1.flag = 0x0030;
  954. memcpy(preq_key_block->tlv1.key_token, key_token, *key_token_size);
  955. preqcblk->req_parml += preq_key_block->len;
  956. /* prepare xcrb struct */
  957. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  958. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  959. rc = zcrypt_send_cprb(&xcrb);
  960. if (rc) {
  961. DEBUG_ERR(
  962. "%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  963. __func__, (int)cardnr, (int)domain, rc);
  964. goto out;
  965. }
  966. /* check response returncode and reasoncode */
  967. if (prepcblk->ccp_rtcode != 0) {
  968. DEBUG_ERR(
  969. "%s CSNBKPI2 failure, card response %d/%d\n",
  970. __func__,
  971. (int)prepcblk->ccp_rtcode,
  972. (int)prepcblk->ccp_rscode);
  973. rc = -EIO;
  974. goto out;
  975. }
  976. /* process response cprb param block */
  977. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  978. prepcblk->rpl_parmb = (u8 __user *)ptr;
  979. prepparm = (struct iprepparm *)ptr;
  980. /* do some plausibility checks on the key block */
  981. if (prepparm->kb.len < 120 + 3 * sizeof(uint16_t) ||
  982. prepparm->kb.len > 136 + 3 * sizeof(uint16_t)) {
  983. DEBUG_ERR("%s reply with invalid or unknown key block\n",
  984. __func__);
  985. rc = -EIO;
  986. goto out;
  987. }
  988. /* do not check the key here, it may be incomplete */
  989. /* copy the vlsc key token back */
  990. t = (struct cipherkeytoken *)prepparm->kb.tlv1.key_token;
  991. memcpy(key_token, t, t->len);
  992. *key_token_size = t->len;
  993. out:
  994. free_cprbmem(mem, PARMBSIZE, 0);
  995. return rc;
  996. }
  997. /*
  998. * Build CCA AES CIPHER secure key with a given clear key value.
  999. */
  1000. int cca_clr2cipherkey(u16 card, u16 dom, u32 keybitsize, u32 keygenflags,
  1001. const u8 *clrkey, u8 *keybuf, size_t *keybufsize)
  1002. {
  1003. int rc;
  1004. u8 *token;
  1005. int tokensize;
  1006. u8 exorbuf[32];
  1007. struct cipherkeytoken *t;
  1008. /* fill exorbuf with random data */
  1009. get_random_bytes(exorbuf, sizeof(exorbuf));
  1010. /* allocate space for the key token to build */
  1011. token = kmalloc(MAXCCAVLSCTOKENSIZE, GFP_KERNEL);
  1012. if (!token)
  1013. return -ENOMEM;
  1014. /* prepare the token with the key skeleton */
  1015. tokensize = SIZEOF_SKELETON;
  1016. memcpy(token, aes_cipher_key_skeleton, tokensize);
  1017. /* patch the skeleton key token export flags */
  1018. if (keygenflags) {
  1019. t = (struct cipherkeytoken *)token;
  1020. t->kmf1 |= (u16)(keygenflags & 0x0000FF00);
  1021. t->kmf1 &= (u16)~(keygenflags & 0x000000FF);
  1022. }
  1023. /*
  1024. * Do the key import with the clear key value in 4 steps:
  1025. * 1/4 FIRST import with only random data
  1026. * 2/4 EXOR the clear key
  1027. * 3/4 EXOR the very same random data again
  1028. * 4/4 COMPLETE the secure cipher key import
  1029. */
  1030. rc = _ip_cprb_helper(card, dom, "AES ", "FIRST ", "MIN3PART",
  1031. exorbuf, keybitsize, token, &tokensize);
  1032. if (rc) {
  1033. DEBUG_ERR(
  1034. "%s clear key import 1/4 with CSNBKPI2 failed, rc=%d\n",
  1035. __func__, rc);
  1036. goto out;
  1037. }
  1038. rc = _ip_cprb_helper(card, dom, "AES ", "ADD-PART", NULL,
  1039. clrkey, keybitsize, token, &tokensize);
  1040. if (rc) {
  1041. DEBUG_ERR(
  1042. "%s clear key import 2/4 with CSNBKPI2 failed, rc=%d\n",
  1043. __func__, rc);
  1044. goto out;
  1045. }
  1046. rc = _ip_cprb_helper(card, dom, "AES ", "ADD-PART", NULL,
  1047. exorbuf, keybitsize, token, &tokensize);
  1048. if (rc) {
  1049. DEBUG_ERR(
  1050. "%s clear key import 3/4 with CSNBKPI2 failed, rc=%d\n",
  1051. __func__, rc);
  1052. goto out;
  1053. }
  1054. rc = _ip_cprb_helper(card, dom, "AES ", "COMPLETE", NULL,
  1055. NULL, keybitsize, token, &tokensize);
  1056. if (rc) {
  1057. DEBUG_ERR(
  1058. "%s clear key import 4/4 with CSNBKPI2 failed, rc=%d\n",
  1059. __func__, rc);
  1060. goto out;
  1061. }
  1062. /* copy the generated key token */
  1063. if (keybuf) {
  1064. if (tokensize > *keybufsize)
  1065. rc = -EINVAL;
  1066. else
  1067. memcpy(keybuf, token, tokensize);
  1068. }
  1069. *keybufsize = tokensize;
  1070. out:
  1071. kfree(token);
  1072. return rc;
  1073. }
  1074. EXPORT_SYMBOL(cca_clr2cipherkey);
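/*
 * Arithmetic sketch for the 4-step import above: the key parts are combined
 * by exclusive-or, so with random data R (exorbuf) and clear key value K the
 * accumulated result is R ^ K ^ R == K, i.e. the COMPLETE step seals a
 * secure key that wraps exactly the caller's clear key value.
 */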
  1075. /*
  1076. * Derive protected key from a CCA AES cipher secure key.
  1077. */
  1078. int cca_cipher2protkey(u16 cardnr, u16 domain, const u8 *ckey,
  1079. u8 *protkey, u32 *protkeylen, u32 *protkeytype)
  1080. {
  1081. int rc;
  1082. u8 *mem, *ptr;
  1083. struct CPRBX *preqcblk, *prepcblk;
  1084. struct ica_xcRB xcrb;
  1085. struct aureqparm {
  1086. u8 subfunc_code[2];
  1087. u16 rule_array_len;
  1088. u8 rule_array[8];
  1089. struct {
  1090. u16 len;
  1091. u16 tk_blob_len;
  1092. u16 tk_blob_tag;
  1093. u8 tk_blob[66];
  1094. } vud;
  1095. struct {
  1096. u16 len;
  1097. u16 cca_key_token_len;
  1098. u16 cca_key_token_flags;
  1099. u8 cca_key_token[0]; // 64 or more
  1100. } kb;
  1101. } __packed * preqparm;
  1102. struct aurepparm {
  1103. u8 subfunc_code[2];
  1104. u16 rule_array_len;
  1105. struct {
  1106. u16 len;
  1107. u16 sublen;
  1108. u16 tag;
  1109. struct cpacfkeyblock {
  1110. u8 version; /* version of this struct */
  1111. u8 flags[2];
  1112. u8 algo;
  1113. u8 form;
  1114. u8 pad1[3];
  1115. u16 keylen;
  1116. u8 key[64]; /* the key (keylen bytes) */
  1117. u16 keyattrlen;
  1118. u8 keyattr[32];
  1119. u8 pad2[1];
  1120. u8 vptype;
  1121. u8 vp[32]; /* verification pattern */
  1122. } ckb;
  1123. } vud;
  1124. struct {
  1125. u16 len;
  1126. } kb;
  1127. } __packed * prepparm;
  1128. int keytoklen = ((struct cipherkeytoken *)ckey)->len;
  1129. /* get already prepared memory for 2 cprbs with param block each */
  1130. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  1131. if (rc)
  1132. return rc;
  1133. /* fill request cprb struct */
  1134. preqcblk->domain = domain;
  1135. /* fill request cprb param block with AU request */
  1136. preqparm = (struct aureqparm __force *)preqcblk->req_parmb;
  1137. memcpy(preqparm->subfunc_code, "AU", 2);
  1138. preqparm->rule_array_len =
  1139. sizeof(preqparm->rule_array_len)
  1140. + sizeof(preqparm->rule_array);
  1141. memcpy(preqparm->rule_array, "EXPT-SK ", 8);
  1142. /* vud, tk blob */
  1143. preqparm->vud.len = sizeof(preqparm->vud);
  1144. preqparm->vud.tk_blob_len = sizeof(preqparm->vud.tk_blob)
  1145. + 2 * sizeof(uint16_t);
  1146. preqparm->vud.tk_blob_tag = 0x00C2;
  1147. /* kb, cca token */
  1148. preqparm->kb.len = keytoklen + 3 * sizeof(uint16_t);
  1149. preqparm->kb.cca_key_token_len = keytoklen + 2 * sizeof(uint16_t);
  1150. memcpy(preqparm->kb.cca_key_token, ckey, keytoklen);
  1151. /* now fill length of param block into cprb */
  1152. preqcblk->req_parml = sizeof(struct aureqparm) + keytoklen;
  1153. /* fill xcrb struct */
  1154. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  1155. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  1156. rc = zcrypt_send_cprb(&xcrb);
  1157. if (rc) {
  1158. DEBUG_ERR(
  1159. "%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  1160. __func__, (int)cardnr, (int)domain, rc);
  1161. goto out;
  1162. }
  1163. /* check response returncode and reasoncode */
  1164. if (prepcblk->ccp_rtcode != 0) {
  1165. DEBUG_ERR(
  1166. "%s unwrap secure key failure, card response %d/%d\n",
  1167. __func__,
  1168. (int)prepcblk->ccp_rtcode,
  1169. (int)prepcblk->ccp_rscode);
  1170. if (prepcblk->ccp_rtcode == 8 && prepcblk->ccp_rscode == 2290)
  1171. rc = -EAGAIN;
  1172. else
  1173. rc = -EIO;
  1174. goto out;
  1175. }
  1176. if (prepcblk->ccp_rscode != 0) {
  1177. DEBUG_WARN(
  1178. "%s unwrap secure key warning, card response %d/%d\n",
  1179. __func__,
  1180. (int)prepcblk->ccp_rtcode,
  1181. (int)prepcblk->ccp_rscode);
  1182. }
  1183. /* process response cprb param block */
  1184. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  1185. prepcblk->rpl_parmb = (u8 __user *)ptr;
  1186. prepparm = (struct aurepparm *)ptr;
  1187. /* check the returned keyblock */
  1188. if (prepparm->vud.ckb.version != 0x01 &&
  1189. prepparm->vud.ckb.version != 0x02) {
  1190. DEBUG_ERR("%s reply param keyblock version mismatch 0x%02x\n",
  1191. __func__, (int)prepparm->vud.ckb.version);
  1192. rc = -EIO;
  1193. goto out;
  1194. }
  1195. if (prepparm->vud.ckb.algo != 0x02) {
  1196. DEBUG_ERR(
  1197. "%s reply param keyblock algo mismatch 0x%02x != 0x02\n",
  1198. __func__, (int)prepparm->vud.ckb.algo);
  1199. rc = -EIO;
  1200. goto out;
  1201. }
  1202. /* copy the translated protected key */
  1203. switch (prepparm->vud.ckb.keylen) {
  1204. case 16 + 32:
  1205. /* AES 128 protected key */
  1206. if (protkeytype)
  1207. *protkeytype = PKEY_KEYTYPE_AES_128;
  1208. break;
  1209. case 24 + 32:
  1210. /* AES 192 protected key */
  1211. if (protkeytype)
  1212. *protkeytype = PKEY_KEYTYPE_AES_192;
  1213. break;
  1214. case 32 + 32:
  1215. /* AES 256 protected key */
  1216. if (protkeytype)
  1217. *protkeytype = PKEY_KEYTYPE_AES_256;
  1218. break;
  1219. default:
  1220. DEBUG_ERR("%s unknown/unsupported keylen %d\n",
  1221. __func__, prepparm->vud.ckb.keylen);
  1222. rc = -EIO;
  1223. goto out;
  1224. }
  1225. memcpy(protkey, prepparm->vud.ckb.key, prepparm->vud.ckb.keylen);
  1226. if (protkeylen)
  1227. *protkeylen = prepparm->vud.ckb.keylen;
  1228. out:
  1229. free_cprbmem(mem, PARMBSIZE, 0);
  1230. return rc;
  1231. }
  1232. EXPORT_SYMBOL(cca_cipher2protkey);
  1233. /*
  1234. * Derive protected key from CCA ECC secure private key.
  1235. */
  1236. int cca_ecc2protkey(u16 cardnr, u16 domain, const u8 *key,
  1237. u8 *protkey, u32 *protkeylen, u32 *protkeytype)
  1238. {
  1239. int rc;
  1240. u8 *mem, *ptr;
  1241. struct CPRBX *preqcblk, *prepcblk;
  1242. struct ica_xcRB xcrb;
  1243. struct aureqparm {
  1244. u8 subfunc_code[2];
  1245. u16 rule_array_len;
  1246. u8 rule_array[8];
  1247. struct {
  1248. u16 len;
  1249. u16 tk_blob_len;
  1250. u16 tk_blob_tag;
  1251. u8 tk_blob[66];
  1252. } vud;
  1253. struct {
  1254. u16 len;
  1255. u16 cca_key_token_len;
  1256. u16 cca_key_token_flags;
  1257. u8 cca_key_token[0];
  1258. } kb;
  1259. } __packed * preqparm;
  1260. struct aurepparm {
  1261. u8 subfunc_code[2];
  1262. u16 rule_array_len;
  1263. struct {
  1264. u16 len;
  1265. u16 sublen;
  1266. u16 tag;
  1267. struct cpacfkeyblock {
  1268. u8 version; /* version of this struct */
  1269. u8 flags[2];
  1270. u8 algo;
  1271. u8 form;
  1272. u8 pad1[3];
  1273. u16 keylen;
  1274. u8 key[0]; /* the key (keylen bytes) */
  1275. u16 keyattrlen;
  1276. u8 keyattr[32];
  1277. u8 pad2[1];
  1278. u8 vptype;
  1279. u8 vp[32]; /* verification pattern */
  1280. } ckb;
  1281. } vud;
  1282. struct {
  1283. u16 len;
  1284. } kb;
  1285. } __packed * prepparm;
  1286. int keylen = ((struct eccprivkeytoken *)key)->len;
  1287. /* get already prepared memory for 2 cprbs with param block each */
  1288. rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
  1289. if (rc)
  1290. return rc;
  1291. /* fill request cprb struct */
  1292. preqcblk->domain = domain;
  1293. /* fill request cprb param block with AU request */
  1294. preqparm = (struct aureqparm __force *)preqcblk->req_parmb;
  1295. memcpy(preqparm->subfunc_code, "AU", 2);
  1296. preqparm->rule_array_len =
  1297. sizeof(preqparm->rule_array_len)
  1298. + sizeof(preqparm->rule_array);
  1299. memcpy(preqparm->rule_array, "EXPT-SK ", 8);
  1300. /* vud, tk blob */
  1301. preqparm->vud.len = sizeof(preqparm->vud);
  1302. preqparm->vud.tk_blob_len = sizeof(preqparm->vud.tk_blob)
  1303. + 2 * sizeof(uint16_t);
  1304. preqparm->vud.tk_blob_tag = 0x00C2;
  1305. /* kb, cca token */
  1306. preqparm->kb.len = keylen + 3 * sizeof(uint16_t);
  1307. preqparm->kb.cca_key_token_len = keylen + 2 * sizeof(uint16_t);
  1308. memcpy(preqparm->kb.cca_key_token, key, keylen);
  1309. /* now fill length of param block into cprb */
  1310. preqcblk->req_parml = sizeof(struct aureqparm) + keylen;
  1311. /* fill xcrb struct */
  1312. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  1313. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  1314. rc = zcrypt_send_cprb(&xcrb);
  1315. if (rc) {
  1316. DEBUG_ERR(
  1317. "%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  1318. __func__, (int)cardnr, (int)domain, rc);
  1319. goto out;
  1320. }
  1321. /* check response returncode and reasoncode */
  1322. if (prepcblk->ccp_rtcode != 0) {
  1323. DEBUG_ERR(
  1324. "%s unwrap secure key failure, card response %d/%d\n",
  1325. __func__,
  1326. (int)prepcblk->ccp_rtcode,
  1327. (int)prepcblk->ccp_rscode);
  1328. if (prepcblk->ccp_rtcode == 8 && prepcblk->ccp_rscode == 2290)
  1329. rc = -EAGAIN;
  1330. else
  1331. rc = -EIO;
  1332. goto out;
  1333. }
  1334. if (prepcblk->ccp_rscode != 0) {
  1335. DEBUG_WARN(
  1336. "%s unwrap secure key warning, card response %d/%d\n",
  1337. __func__,
  1338. (int)prepcblk->ccp_rtcode,
  1339. (int)prepcblk->ccp_rscode);
  1340. }
  1341. /* process response cprb param block */
  1342. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  1343. prepcblk->rpl_parmb = (u8 __user *)ptr;
  1344. prepparm = (struct aurepparm *)ptr;
  1345. /* check the returned keyblock */
  1346. if (prepparm->vud.ckb.version != 0x02) {
  1347. DEBUG_ERR("%s reply param keyblock version mismatch 0x%02x != 0x02\n",
  1348. __func__, (int)prepparm->vud.ckb.version);
  1349. rc = -EIO;
  1350. goto out;
  1351. }
  1352. if (prepparm->vud.ckb.algo != 0x81) {
  1353. DEBUG_ERR(
  1354. "%s reply param keyblock algo mismatch 0x%02x != 0x81\n",
  1355. __func__, (int)prepparm->vud.ckb.algo);
  1356. rc = -EIO;
  1357. goto out;
  1358. }
  1359. /* copy the translated protected key */
  1360. if (prepparm->vud.ckb.keylen > *protkeylen) {
  1361. DEBUG_ERR("%s prot keylen mismatch %d > buffersize %u\n",
  1362. __func__, prepparm->vud.ckb.keylen, *protkeylen);
  1363. rc = -EIO;
  1364. goto out;
  1365. }
  1366. memcpy(protkey, prepparm->vud.ckb.key, prepparm->vud.ckb.keylen);
  1367. *protkeylen = prepparm->vud.ckb.keylen;
  1368. if (protkeytype)
  1369. *protkeytype = PKEY_KEYTYPE_ECC;
  1370. out:
  1371. free_cprbmem(mem, PARMBSIZE, 0);
  1372. return rc;
  1373. }
  1374. EXPORT_SYMBOL(cca_ecc2protkey);
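/*
 * Illustrative usage sketch (not part of the original driver): derive a CPACF
 * protected key from a CCA ECC private key token. Note the in/out semantics
 * checked above: *protkeylen must hold the protkey buffer size on entry and
 * receives the actual protected key length on success. Card 2 / domain 35
 * are assumed example values.
 */
static int __maybe_unused example_ecckey_to_protkey(const u8 *key, u8 *protkey,
						    u32 *protkeylen,
						    u32 *protkeytype)
{
	return cca_ecc2protkey(2, 35, key, protkey, protkeylen, protkeytype);
}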
  1375. /*
  1376. * query cryptographic facility from CCA adapter
  1377. */
  1378. int cca_query_crypto_facility(u16 cardnr, u16 domain,
  1379. const char *keyword,
  1380. u8 *rarray, size_t *rarraylen,
  1381. u8 *varray, size_t *varraylen)
  1382. {
  1383. int rc;
  1384. u16 len;
  1385. u8 *mem, *ptr;
  1386. struct CPRBX *preqcblk, *prepcblk;
  1387. struct ica_xcRB xcrb;
  1388. struct fqreqparm {
  1389. u8 subfunc_code[2];
  1390. u16 rule_array_len;
  1391. char rule_array[8];
  1392. struct lv1 {
  1393. u16 len;
  1394. u8 data[VARDATASIZE];
  1395. } lv1;
  1396. u16 dummylen;
  1397. } __packed * preqparm;
  1398. size_t parmbsize = sizeof(struct fqreqparm);
  1399. struct fqrepparm {
  1400. u8 subfunc_code[2];
  1401. u8 lvdata[0];
  1402. } __packed * prepparm;
  1403. /* get already prepared memory for 2 cprbs with param block each */
  1404. rc = alloc_and_prep_cprbmem(parmbsize, &mem, &preqcblk, &prepcblk);
  1405. if (rc)
  1406. return rc;
  1407. /* fill request cprb struct */
  1408. preqcblk->domain = domain;
  1409. /* fill request cprb param block with FQ request */
  1410. preqparm = (struct fqreqparm __force *)preqcblk->req_parmb;
  1411. memcpy(preqparm->subfunc_code, "FQ", 2);
  1412. memcpy(preqparm->rule_array, keyword, sizeof(preqparm->rule_array));
  1413. preqparm->rule_array_len =
  1414. sizeof(preqparm->rule_array_len) + sizeof(preqparm->rule_array);
  1415. preqparm->lv1.len = sizeof(preqparm->lv1);
  1416. preqparm->dummylen = sizeof(preqparm->dummylen);
  1417. preqcblk->req_parml = parmbsize;
  1418. /* fill xcrb struct */
  1419. prep_xcrb(&xcrb, cardnr, preqcblk, prepcblk);
  1420. /* forward xcrb with request CPRB and reply CPRB to zcrypt dd */
  1421. rc = zcrypt_send_cprb(&xcrb);
  1422. if (rc) {
  1423. DEBUG_ERR("%s zcrypt_send_cprb (cardnr=%d domain=%d) failed, rc=%d\n",
  1424. __func__, (int)cardnr, (int)domain, rc);
  1425. goto out;
  1426. }
  1427. /* check response returncode and reasoncode */
  1428. if (prepcblk->ccp_rtcode != 0) {
  1429. DEBUG_ERR("%s unwrap secure key failure, card response %d/%d\n",
  1430. __func__,
  1431. (int)prepcblk->ccp_rtcode,
  1432. (int)prepcblk->ccp_rscode);
  1433. rc = -EIO;
  1434. goto out;
  1435. }
  1436. /* process response cprb param block */
  1437. ptr = ((u8 *)prepcblk) + sizeof(struct CPRBX);
  1438. prepcblk->rpl_parmb = (u8 __user *)ptr;
  1439. prepparm = (struct fqrepparm *)ptr;
  1440. ptr = prepparm->lvdata;
  1441. /* check and possibly copy reply rule array */
  1442. len = *((u16 *)ptr);
  1443. if (len > sizeof(u16)) {
  1444. ptr += sizeof(u16);
  1445. len -= sizeof(u16);
  1446. if (rarray && rarraylen && *rarraylen > 0) {
  1447. *rarraylen = (len > *rarraylen ? *rarraylen : len);
  1448. memcpy(rarray, ptr, *rarraylen);
  1449. }
  1450. ptr += len;
  1451. }
  1452. /* check and possibly copy reply var array */
  1453. len = *((u16 *)ptr);
  1454. if (len > sizeof(u16)) {
  1455. ptr += sizeof(u16);
  1456. len -= sizeof(u16);
  1457. if (varray && varraylen && *varraylen > 0) {
  1458. *varraylen = (len > *varraylen ? *varraylen : len);
  1459. memcpy(varray, ptr, *varraylen);
  1460. }
  1461. ptr += len;
  1462. }

out:
	free_cprbmem(mem, parmbsize, 0);
	return rc;
}
EXPORT_SYMBOL(cca_query_crypto_facility);
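
/*
 * Usage sketch for cca_query_crypto_facility() (illustrative only, not a
 * copy of an in-tree caller): the caller provides pre-sized buffers plus
 * their capacities; on return the length arguments hold the number of
 * bytes actually copied, capped at the supplied capacity.  fetch_cca_info()
 * below uses half a page each for the rule array and the var array.
 *
 *	u8 rarray[8 * 16], varray[512];
 *	size_t rlen = sizeof(rarray), vlen = sizeof(varray);
 *	int rc;
 *
 *	rc = cca_query_crypto_facility(cardnr, domain, "STATICSA",
 *				       rarray, &rlen, varray, &vlen);
 *	if (rc == 0)
 *		pr_debug("rule array %zu bytes, var array %zu bytes\n",
 *			 rlen, vlen);
 */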

static int cca_info_cache_fetch(u16 cardnr, u16 domain, struct cca_info *ci)
{
	int rc = -ENOENT;
	struct cca_info_list_entry *ptr;

	spin_lock_bh(&cca_info_list_lock);
	list_for_each_entry(ptr, &cca_info_list, list) {
		if (ptr->cardnr == cardnr && ptr->domain == domain) {
			memcpy(ci, &ptr->info, sizeof(*ci));
			rc = 0;
			break;
		}
	}
	spin_unlock_bh(&cca_info_list_lock);

	return rc;
}

static void cca_info_cache_update(u16 cardnr, u16 domain,
				  const struct cca_info *ci)
{
	int found = 0;
	struct cca_info_list_entry *ptr;

	spin_lock_bh(&cca_info_list_lock);
	list_for_each_entry(ptr, &cca_info_list, list) {
		if (ptr->cardnr == cardnr &&
		    ptr->domain == domain) {
			memcpy(&ptr->info, ci, sizeof(*ci));
			found = 1;
			break;
		}
	}
	if (!found) {
		ptr = kmalloc(sizeof(*ptr), GFP_ATOMIC);
		if (!ptr) {
			spin_unlock_bh(&cca_info_list_lock);
			return;
		}
		ptr->cardnr = cardnr;
		ptr->domain = domain;
		memcpy(&ptr->info, ci, sizeof(*ci));
		list_add(&ptr->list, &cca_info_list);
	}
	spin_unlock_bh(&cca_info_list_lock);
}

static void cca_info_cache_scrub(u16 cardnr, u16 domain)
{
	struct cca_info_list_entry *ptr;

	spin_lock_bh(&cca_info_list_lock);
	list_for_each_entry(ptr, &cca_info_list, list) {
		if (ptr->cardnr == cardnr &&
		    ptr->domain == domain) {
			list_del(&ptr->list);
			kfree(ptr);
			break;
		}
	}
	spin_unlock_bh(&cca_info_list_lock);
}

static void __exit mkvp_cache_free(void)
{
	struct cca_info_list_entry *ptr, *pnext;

	spin_lock_bh(&cca_info_list_lock);
	list_for_each_entry_safe(ptr, pnext, &cca_info_list, list) {
		list_del(&ptr->list);
		kfree(ptr);
	}
	spin_unlock_bh(&cca_info_list_lock);
}

/*
 * Fetch cca_info values via query_crypto_facility from adapter.
 */
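/*
 * Reply layout as consumed by the parsing below: the rule array is read as
 * consecutive 8-byte entries.  For "STATICSA" entry 0 holds the serial
 * number, entries 4-6 the new/current/old ASYM master key states and
 * entries 7-9 the AES master key states; the matching MKVPs are taken from
 * fixed offsets within the var array.  "STATICSB" supplies the APKA master
 * key states in entries 10-12.  The code treats a state of '2' as "register
 * holds a valid master key" and '3' as "new master key register full".
 */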
static int fetch_cca_info(u16 cardnr, u16 domain, struct cca_info *ci)
{
	int rc, found = 0;
	size_t rlen, vlen;
	u8 *rarray, *varray, *pg;
	struct zcrypt_device_status_ext devstat;

	memset(ci, 0, sizeof(*ci));

	/* get first info from zcrypt device driver about this apqn */
	rc = zcrypt_device_status_ext(cardnr, domain, &devstat);
	if (rc)
		return rc;
	ci->hwtype = devstat.hwtype;

	/* prep page for rule array and var array use */
	pg = (u8 *)__get_free_page(GFP_KERNEL);
	if (!pg)
		return -ENOMEM;
	rarray = pg;
	varray = pg + PAGE_SIZE / 2;
	rlen = vlen = PAGE_SIZE / 2;

	/* QF for this card/domain */
	rc = cca_query_crypto_facility(cardnr, domain, "STATICSA",
				       rarray, &rlen, varray, &vlen);
	if (rc == 0 && rlen >= 10 * 8 && vlen >= 204) {
		memcpy(ci->serial, rarray, 8);
		ci->new_asym_mk_state = (char)rarray[4 * 8];
		ci->cur_asym_mk_state = (char)rarray[5 * 8];
		ci->old_asym_mk_state = (char)rarray[6 * 8];
		if (ci->old_asym_mk_state == '2')
			memcpy(ci->old_asym_mkvp, varray + 64, 16);
		if (ci->cur_asym_mk_state == '2')
			memcpy(ci->cur_asym_mkvp, varray + 84, 16);
		if (ci->new_asym_mk_state == '3')
			memcpy(ci->new_asym_mkvp, varray + 104, 16);
		ci->new_aes_mk_state = (char)rarray[7 * 8];
		ci->cur_aes_mk_state = (char)rarray[8 * 8];
		ci->old_aes_mk_state = (char)rarray[9 * 8];
		if (ci->old_aes_mk_state == '2')
			memcpy(&ci->old_aes_mkvp, varray + 172, 8);
		if (ci->cur_aes_mk_state == '2')
			memcpy(&ci->cur_aes_mkvp, varray + 184, 8);
		if (ci->new_aes_mk_state == '3')
			memcpy(&ci->new_aes_mkvp, varray + 196, 8);
		found++;
	}
	if (!found)
		goto out;
	rlen = vlen = PAGE_SIZE / 2;
	rc = cca_query_crypto_facility(cardnr, domain, "STATICSB",
				       rarray, &rlen, varray, &vlen);
	if (rc == 0 && rlen >= 13 * 8 && vlen >= 240) {
		ci->new_apka_mk_state = (char)rarray[10 * 8];
		ci->cur_apka_mk_state = (char)rarray[11 * 8];
		ci->old_apka_mk_state = (char)rarray[12 * 8];
		if (ci->old_apka_mk_state == '2')
			memcpy(&ci->old_apka_mkvp, varray + 208, 8);
		if (ci->cur_apka_mk_state == '2')
			memcpy(&ci->cur_apka_mkvp, varray + 220, 8);
		if (ci->new_apka_mk_state == '3')
			memcpy(&ci->new_apka_mkvp, varray + 232, 8);
		found++;
	}

out:
	free_page((unsigned long)pg);
	return found == 2 ? 0 : -ENOENT;
}

/*
 * Fetch cca information about a CCA queue.
 */
int cca_get_info(u16 card, u16 dom, struct cca_info *ci, int verify)
{
	int rc;

	rc = cca_info_cache_fetch(card, dom, ci);
	if (rc || verify) {
		rc = fetch_cca_info(card, dom, ci);
		if (rc == 0)
			cca_info_cache_update(card, dom, ci);
	}

	return rc;
}
EXPORT_SYMBOL(cca_get_info);
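
/*
 * Usage sketch for cca_get_info() (illustrative only): with verify == 0 a
 * cached entry is returned when one exists; otherwise, or with verify != 0,
 * the info is fetched from the adapter and the cache is refreshed.
 *
 *	struct cca_info ci;
 *
 *	if (cca_get_info(card, dom, &ci, 0) == 0 &&
 *	    ci.cur_aes_mk_state == '2')
 *		pr_debug("APQN %02x.%04x AES mkvp 0x%016llx\n",
 *			 card, dom, (unsigned long long)ci.cur_aes_mkvp);
 */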

/*
 * Search for a matching crypto card based on the
 * Master Key Verification Pattern given.
 */
static int findcard(u64 mkvp, u16 *pcardnr, u16 *pdomain,
		    int verify, int minhwtype)
{
	struct zcrypt_device_status_ext *device_status;
	u16 card, dom;
	struct cca_info ci;
	int i, rc, oi = -1;

	/* mkvp must not be zero, minhwtype needs to be >= 0 */
	if (mkvp == 0 || minhwtype < 0)
		return -EINVAL;

	/* fetch status of all crypto cards */
	device_status = kvmalloc_array(MAX_ZDEV_ENTRIES_EXT,
				       sizeof(struct zcrypt_device_status_ext),
				       GFP_KERNEL);
	if (!device_status)
		return -ENOMEM;
	zcrypt_device_status_mask_ext(device_status);

	/* walk through all crypto cards */
	for (i = 0; i < MAX_ZDEV_ENTRIES_EXT; i++) {
		card = AP_QID_CARD(device_status[i].qid);
		dom = AP_QID_QUEUE(device_status[i].qid);
		if (device_status[i].online &&
		    device_status[i].functions & 0x04) {
			/* enabled CCA card, check current mkvp from cache */
			if (cca_info_cache_fetch(card, dom, &ci) == 0 &&
			    ci.hwtype >= minhwtype &&
			    ci.cur_aes_mk_state == '2' &&
			    ci.cur_aes_mkvp == mkvp) {
				if (!verify)
					break;
				/* verify: refresh card info */
				if (fetch_cca_info(card, dom, &ci) == 0) {
					cca_info_cache_update(card, dom, &ci);
					if (ci.hwtype >= minhwtype &&
					    ci.cur_aes_mk_state == '2' &&
					    ci.cur_aes_mkvp == mkvp)
						break;
				}
			}
		} else {
			/* Card is offline and/or not a CCA card. */
			/* del mkvp entry from cache if it exists */
			cca_info_cache_scrub(card, dom);
		}
	}
	if (i >= MAX_ZDEV_ENTRIES_EXT) {
		/* nothing found, so this time without cache */
		for (i = 0; i < MAX_ZDEV_ENTRIES_EXT; i++) {
			if (!(device_status[i].online &&
			      device_status[i].functions & 0x04))
				continue;
			card = AP_QID_CARD(device_status[i].qid);
			dom = AP_QID_QUEUE(device_status[i].qid);
			/* fresh fetch mkvp from adapter */
			if (fetch_cca_info(card, dom, &ci) == 0) {
				cca_info_cache_update(card, dom, &ci);
				if (ci.hwtype >= minhwtype &&
				    ci.cur_aes_mk_state == '2' &&
				    ci.cur_aes_mkvp == mkvp)
					break;
				if (ci.hwtype >= minhwtype &&
				    ci.old_aes_mk_state == '2' &&
				    ci.old_aes_mkvp == mkvp &&
				    oi < 0)
					oi = i;
			}
		}
		if (i >= MAX_ZDEV_ENTRIES_EXT && oi >= 0) {
			/* old mkvp matched, use this card then */
			card = AP_QID_CARD(device_status[oi].qid);
			dom = AP_QID_QUEUE(device_status[oi].qid);
		}
	}
	if (i < MAX_ZDEV_ENTRIES_EXT || oi >= 0) {
		if (pcardnr)
			*pcardnr = card;
		if (pdomain)
			*pdomain = dom;
		rc = (i < MAX_ZDEV_ENTRIES_EXT ? 0 : 1);
	} else {
		rc = -ENODEV;
	}

	kvfree(device_status);
	return rc;
}

/*
 * Search for a matching crypto card based on the Master Key
 * Verification Pattern provided inside a secure key token.
 */
int cca_findcard(const u8 *key, u16 *pcardnr, u16 *pdomain, int verify)
{
	u64 mkvp;
	int minhwtype = 0;
	const struct keytoken_header *hdr = (struct keytoken_header *)key;

	if (hdr->type != TOKTYPE_CCA_INTERNAL)
		return -EINVAL;

	switch (hdr->version) {
	case TOKVER_CCA_AES:
		mkvp = ((struct secaeskeytoken *)key)->mkvp;
		break;
	case TOKVER_CCA_VLSC:
		mkvp = ((struct cipherkeytoken *)key)->mkvp0;
		minhwtype = AP_DEVICE_TYPE_CEX6;
		break;
	default:
		return -EINVAL;
	}

	return findcard(mkvp, pcardnr, pdomain, verify, minhwtype);
}
EXPORT_SYMBOL(cca_findcard);
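
/*
 * Usage sketch for cca_findcard() (illustrative only): key points to a CCA
 * internal secure key token.  As with findcard(), a return value of 0 means
 * an APQN whose current AES master key matches was found, 1 means only the
 * old master key register matched, negative values are errors.
 *
 *	u16 cardnr, domain;
 *	int rc;
 *
 *	rc = cca_findcard(seckey, &cardnr, &domain, 0);
 *	if (rc < 0)
 *		return rc;
 */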

int cca_findcard2(u32 **apqns, u32 *nr_apqns, u16 cardnr, u16 domain,
		  int minhwtype, int mktype, u64 cur_mkvp, u64 old_mkvp,
		  int verify)
{
	struct zcrypt_device_status_ext *device_status;
	u32 *_apqns = NULL, _nr_apqns = 0;
	int i, card, dom, curmatch, oldmatch, rc = 0;
	struct cca_info ci;

	/* fetch status of all crypto cards */
	device_status = kvmalloc_array(MAX_ZDEV_ENTRIES_EXT,
				       sizeof(struct zcrypt_device_status_ext),
				       GFP_KERNEL);
	if (!device_status)
		return -ENOMEM;
	zcrypt_device_status_mask_ext(device_status);

	/* allocate 1k space for up to 256 apqns */
	_apqns = kmalloc_array(256, sizeof(u32), GFP_KERNEL);
	if (!_apqns) {
		kvfree(device_status);
		return -ENOMEM;
	}

	/* walk through all the crypto apqns */
	for (i = 0; i < MAX_ZDEV_ENTRIES_EXT; i++) {
		card = AP_QID_CARD(device_status[i].qid);
		dom = AP_QID_QUEUE(device_status[i].qid);
		/* check online state */
		if (!device_status[i].online)
			continue;
		/* check for cca functions */
		if (!(device_status[i].functions & 0x04))
			continue;
		/* check cardnr */
		if (cardnr != 0xFFFF && card != cardnr)
			continue;
		/* check domain */
		if (domain != 0xFFFF && dom != domain)
			continue;
		/* get cca info on this apqn */
		if (cca_get_info(card, dom, &ci, verify))
			continue;
		/* current master key needs to be valid */
		if (mktype == AES_MK_SET && ci.cur_aes_mk_state != '2')
			continue;
		if (mktype == APKA_MK_SET && ci.cur_apka_mk_state != '2')
			continue;
		/* check min hardware type */
		if (minhwtype > 0 && minhwtype > ci.hwtype)
			continue;
		if (cur_mkvp || old_mkvp) {
			/* check mkvps */
			curmatch = oldmatch = 0;
			if (mktype == AES_MK_SET) {
				if (cur_mkvp && cur_mkvp == ci.cur_aes_mkvp)
					curmatch = 1;
				if (old_mkvp && ci.old_aes_mk_state == '2' &&
				    old_mkvp == ci.old_aes_mkvp)
					oldmatch = 1;
			} else {
				if (cur_mkvp && cur_mkvp == ci.cur_apka_mkvp)
					curmatch = 1;
				if (old_mkvp && ci.old_apka_mk_state == '2' &&
				    old_mkvp == ci.old_apka_mkvp)
					oldmatch = 1;
			}
			if (curmatch + oldmatch < 1)
				continue;
		}
		/* apqn passed all filtering criteria, add it to the array */
		if (_nr_apqns < 256)
			_apqns[_nr_apqns++] = (((u16)card) << 16) | ((u16)dom);
	}

	/* nothing found ? */
	if (!_nr_apqns) {
		kfree(_apqns);
		rc = -ENODEV;
	} else {
		/* no re-allocation, simply return the _apqns array */
		*apqns = _apqns;
		*nr_apqns = _nr_apqns;
		rc = 0;
	}

	kvfree(device_status);
	return rc;
}
EXPORT_SYMBOL(cca_findcard2);
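
/*
 * Usage sketch for cca_findcard2() (illustrative only): on success an array
 * of up to 256 APQNs is handed back, each encoded as (card << 16) | domain
 * in a u32; the caller owns the array and must kfree() it.  0xFFFF for
 * cardnr or domain acts as a wildcard.  The mkvp value here stands for
 * whatever current AES MKVP the caller wants to match.
 *
 *	u32 *apqns, nr_apqns;
 *	int rc;
 *
 *	rc = cca_findcard2(&apqns, &nr_apqns, 0xFFFF, 0xFFFF,
 *			   AP_DEVICE_TYPE_CEX6, AES_MK_SET, mkvp, 0, 0);
 *	if (rc == 0) {
 *		... use apqns[0] through apqns[nr_apqns - 1] ...
 *		kfree(apqns);
 *	}
 */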

void __exit zcrypt_ccamisc_exit(void)
{
	mkvp_cache_free();
}