testmgr.c 152 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931
  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. /*
  3. * Algorithm testing framework and tests.
  4. *
  5. * Copyright (c) 2002 James Morris <[email protected]>
  6. * Copyright (c) 2002 Jean-Francois Dive <[email protected]>
  7. * Copyright (c) 2007 Nokia Siemens Networks
  8. * Copyright (c) 2008 Herbert Xu <[email protected]>
  9. * Copyright (c) 2019 Google LLC
  10. *
  11. * Updated RFC4106 AES-GCM testing.
  12. * Authors: Aidan O'Mahony ([email protected])
  13. * Adrian Hoban <[email protected]>
  14. * Gabriele Paoloni <[email protected]>
  15. * Tadeusz Struk ([email protected])
  16. * Copyright (c) 2010, Intel Corporation.
  17. */
  18. #include <crypto/aead.h>
  19. #include <crypto/hash.h>
  20. #include <crypto/skcipher.h>
  21. #include <linux/err.h>
  22. #include <linux/fips.h>
  23. #include <linux/module.h>
  24. #include <linux/once.h>
  25. #include <linux/random.h>
  26. #include <linux/scatterlist.h>
  27. #include <linux/slab.h>
  28. #include <linux/string.h>
  29. #include <linux/uio.h>
  30. #include <crypto/rng.h>
  31. #include <crypto/drbg.h>
  32. #include <crypto/akcipher.h>
  33. #include <crypto/kpp.h>
  34. #include <crypto/acompress.h>
  35. #include <crypto/internal/cipher.h>
  36. #include <crypto/internal/simd.h>
  37. #include "internal.h"
  38. MODULE_IMPORT_NS(CRYPTO_INTERNAL);
/* If set, skip all self-tests and report every algorithm as passing. */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

/* If set, a self-test failure triggers a kernel panic instead of an error. */
static bool panic_on_fail;
module_param(panic_on_fail, bool, 0444);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* Opt-out knob for the expensive randomized ("fuzz") self-tests. */
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

/* Number of random iterations run per fuzz test (default 100). */
static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
#endif
#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/*
 * Self-tests are compiled out entirely: unconditionally report success
 * for every algorithm (a perfect nop).
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else
  59. #include "testmgr.h"
/*
 * Number of test data buffers allocated per test run; each buffer is
 * sized in whole pages (see __testmgr_alloc_buf()).
 */
#define XBUFSIZE 8

/*
 * Direction selectors used by test_cipher().
 */
#define ENCRYPT 1
#define DECRYPT 0
/* Set of test vectors for one AEAD algorithm */
struct aead_test_suite {
	const struct aead_testvec *vecs;	/* array of test vectors */
	unsigned int count;			/* number of entries in @vecs */

	/*
	 * Set if trying to decrypt an inauthentic ciphertext with this
	 * algorithm might result in EINVAL rather than EBADMSG, due to other
	 * validation the algorithm does on the inputs such as length checks.
	 */
	unsigned int einval_allowed : 1;

	/*
	 * Set if this algorithm requires that the IV be located at the end of
	 * the AAD buffer, in addition to being given in the normal way. The
	 * behavior when the two IV copies differ is implementation-defined.
	 */
	unsigned int aad_iv : 1;
};
/*
 * The remaining *_test_suite structs each pair an array of test vectors
 * with its length, one struct per algorithm type.
 */
struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	/* separate vector sets for the compress and decompress directions */
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};
/*
 * One entry of the self-test table: names the algorithm, supplies the
 * test function to run, and holds the test vectors for that algorithm's
 * type in @suite.
 */
struct alg_test_desc {
	const char *alg;		/* algorithm name this entry tests */
	const char *generic_driver;	/* NOTE(review): presumably the name of
					 * the generic implementation to compare
					 * against — confirm with callers */
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed; /* set if alg is allowed in fips mode */

	/* per-algorithm-type test vectors; which member is valid depends
	 * on which test function is used */
	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
  132. static void hexdump(unsigned char *buf, unsigned int len)
  133. {
  134. print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
  135. 16, 1,
  136. buf, len, false);
  137. }
  138. static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
  139. {
  140. int i;
  141. for (i = 0; i < XBUFSIZE; i++) {
  142. buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
  143. if (!buf[i])
  144. goto err_free_buf;
  145. }
  146. return 0;
  147. err_free_buf:
  148. while (i-- > 0)
  149. free_pages((unsigned long)buf[i], order);
  150. return -ENOMEM;
  151. }
  152. static int testmgr_alloc_buf(char *buf[XBUFSIZE])
  153. {
  154. return __testmgr_alloc_buf(buf, 0);
  155. }
  156. static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
  157. {
  158. int i;
  159. for (i = 0; i < XBUFSIZE; i++)
  160. free_pages((unsigned long)buf[i], order);
  161. }
  162. static void testmgr_free_buf(char *buf[XBUFSIZE])
  163. {
  164. __testmgr_free_buf(buf, 0);
  165. }
  166. #define TESTMGR_POISON_BYTE 0xfe
  167. #define TESTMGR_POISON_LEN 16
  168. static inline void testmgr_poison(void *addr, size_t len)
  169. {
  170. memset(addr, TESTMGR_POISON_BYTE, len);
  171. }
  172. /* Is the memory region still fully poisoned? */
  173. static inline bool testmgr_is_poison(const void *addr, size_t len)
  174. {
  175. return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
  176. }
/* How to handle buffered data when feeding a chunk to a hash algorithm */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};
/* Which API call is used to produce the final digest of a hash */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};
/*
 * Whether the crypto operation will occur in-place, and if so whether the
 * source and destination scatterlist pointers will coincide (req->src ==
 * req->dst), or whether they'll merely point to two separate scatterlists
 * (req->src != req->dst) that reference the same underlying memory.
 *
 * This is only relevant for algorithm types that support in-place operation.
 */
enum inplace_mode {
	OUT_OF_PLACE,		/* src and dst are separate buffers */
	INPLACE_ONE_SGLIST,	/* req->src == req->dst */
	INPLACE_TWO_SGLISTS,	/* two sglists over the same memory */
};
/* Denominator for @proportion_of_total below */
#define TEST_SG_TOTAL 10000

/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *			 given as a proportion out of TEST_SG_TOTAL so that it
 *			 scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *				  @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *		continuing to accumulate data
 * @nosimd: if doing the pending update(), do it with SIMD disabled?
 */
struct test_sg_division {
	unsigned int proportion_of_total;
	unsigned int offset;
	bool offset_relative_to_alignmask;
	enum flush_type flush_type;
	bool nosimd;
};
/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace_mode: whether and how to operate on the data in-place, if applicable
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type. Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @key_offset: misalignment of the key, where 0 is default alignment
 * @key_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				      the @key_offset
 * @finalization_type: what finalization function to use for hashes
 * @nosimd: execute with SIMD disabled? Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
 */
struct testvec_config {
	const char *name;
	enum inplace_mode inplace_mode;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	unsigned int key_offset;
	bool iv_offset_relative_to_alignmask;
	bool key_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
	bool nosimd;
};

/* Buffer size used when generating @name strings for random configs —
 * NOTE(review): generation code is outside this view; confirm usage. */
#define TESTVEC_CONFIG_NAMELEN 192
  265. /*
  266. * The following are the lists of testvec_configs to test for each algorithm
  267. * type when the basic crypto self-tests are enabled, i.e. when
  268. * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset. They aim to provide good test
  269. * coverage, while keeping the test time much shorter than the full fuzz tests
  270. * so that the basic tests can be enabled in a wider range of circumstances.
  271. */
  272. /* Configs for skciphers and aeads */
static const struct testvec_config default_cipher_testvec_configs[] = {
	/* .proportion_of_total values are out of TEST_SG_TOTAL (10000) */
	{
		.name = "in-place (one sglist)",
		.inplace_mode = INPLACE_ONE_SGLIST,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "in-place (two sglists)",
		.inplace_mode = INPLACE_TWO_SGLISTS,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "out-of-place",
		.inplace_mode = OUT_OF_PLACE,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		/* misalign data, IV, and key all by one byte */
		.name = "unaligned buffer, offset=1",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.iv_offset = 1,
		.key_offset = 1,
	}, {
		/* offsets are relative to the alignmask, i.e. minimally aligned */
		.name = "buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.iv_offset = 1,
		.iv_offset_relative_to_alignmask = true,
		.key_offset = 1,
		.key_offset_relative_to_alignmask = true,
	}, {
		.name = "two even aligned splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.iv_offset = 3,
		.key_offset = 3,
	}, {
		/* offsets near end of page force each chunk to span a page boundary */
		.name = "misaligned splits crossing pages, inplace",
		.inplace_mode = INPLACE_ONE_SGLIST,
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7
			},
		},
	}
};
static const struct testvec_config default_hash_testvec_configs[] = {
	/* .proportion_of_total values are out of TEST_SG_TOTAL (10000) */
	{
		.name = "init+update+final aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "init+finup aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINUP,
	}, {
		.name = "digest aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "init+update+final misaligned buffer",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
		.key_offset = 1,
	}, {
		.name = "digest buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
		.key_offset = 1,
		.key_offset_relative_to_alignmask = true,
	}, {
		/* FLUSH_TYPE_FLUSH on the second chunk forces an extra update() */
		.name = "init+update+update+final two even splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{
				.proportion_of_total = 5000,
				.flush_type = FLUSH_TYPE_FLUSH,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "digest misaligned splits crossing pages",
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32,
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		/* FLUSH_TYPE_REIMPORT exercises the export()/import() round trip */
		.name = "import/export",
		.src_divs = {
			{
				.proportion_of_total = 6500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			}, {
				.proportion_of_total = 3500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}
};
  409. static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
  410. {
  411. unsigned int remaining = TEST_SG_TOTAL;
  412. unsigned int ndivs = 0;
  413. do {
  414. remaining -= divs[ndivs++].proportion_of_total;
  415. } while (remaining);
  416. return ndivs;
  417. }
  418. #define SGDIVS_HAVE_FLUSHES BIT(0)
  419. #define SGDIVS_HAVE_NOSIMD BIT(1)
  420. static bool valid_sg_divisions(const struct test_sg_division *divs,
  421. unsigned int count, int *flags_ret)
  422. {
  423. unsigned int total = 0;
  424. unsigned int i;
  425. for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
  426. if (divs[i].proportion_of_total <= 0 ||
  427. divs[i].proportion_of_total > TEST_SG_TOTAL - total)
  428. return false;
  429. total += divs[i].proportion_of_total;
  430. if (divs[i].flush_type != FLUSH_TYPE_NONE)
  431. *flags_ret |= SGDIVS_HAVE_FLUSHES;
  432. if (divs[i].nosimd)
  433. *flags_ret |= SGDIVS_HAVE_NOSIMD;
  434. }
  435. return total == TEST_SG_TOTAL &&
  436. memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
  437. }
  438. /*
  439. * Check whether the given testvec_config is valid. This isn't strictly needed
  440. * since every testvec_config should be valid, but check anyway so that people
  441. * don't unknowingly add broken configs that don't do what they wanted.
  442. */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	int flags = 0;

	if (cfg->name == NULL)
		return false;

	/* The source divisions must always be present and valid. */
	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&flags))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs), &flags))
			return false;
	} else {
		/* If dst_divs is unused it must be entirely zeroed ... */
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
		/* defaults to dst_divs=src_divs */
	}

	/* The IV offset must stay within the misalignment range. */
	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	/* Flushes and per-division nosimd only make sense with update() calls. */
	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	/* nosimd in any form is incompatible with sleeping requests. */
	if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		return false;

	return true;
}
/*
 * A scatterlist for testing, together with the underlying 2-page buffers and a
 * saved copy of the sg entries used to detect corruption by the algorithm.
 */
struct test_sglist {
	char *bufs[XBUFSIZE];			/* 2-page backing buffers */
	struct scatterlist sgl[XBUFSIZE];	/* the scatterlist itself */
	struct scatterlist sgl_saved[XBUFSIZE];	/* snapshot taken at build time */
	struct scatterlist *sgl_ptr;		/* what to actually pass to the API */
	unsigned int nents;			/* number of entries in use */
};
/*
 * Allocate the 2-page-per-entry backing buffers for @tsgl.
 * Propagates __testmgr_alloc_buf()'s return value (0 on success).
 */
static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}
  483. static void destroy_test_sglist(struct test_sglist *tsgl)
  484. {
  485. return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
  486. }
  487. /**
  488. * build_test_sglist() - build a scatterlist for a crypto test
  489. *
  490. * @tsgl: the scatterlist to build. @tsgl->bufs[] contains an array of 2-page
  491. * buffers which the scatterlist @tsgl->sgl[] will be made to point into.
  492. * @divs: the layout specification on which the scatterlist will be based
  493. * @alignmask: the algorithm's alignmask
  494. * @total_len: the total length of the scatterlist to build in bytes
  495. * @data: if non-NULL, the buffers will be filled with this data until it ends.
  496. * Otherwise the buffers will be poisoned. In both cases, some bytes
  497. * past the end of each buffer will be poisoned to help detect overruns.
  498. * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
  499. * corresponds will be returned here. This will match @divs except
  500. * that divisions resolving to a length of 0 are omitted as they are
  501. * not included in the scatterlist.
  502. *
  503. * Return: 0 or a -errno value
  504. */
  505. static int build_test_sglist(struct test_sglist *tsgl,
  506. const struct test_sg_division *divs,
  507. const unsigned int alignmask,
  508. const unsigned int total_len,
  509. struct iov_iter *data,
  510. const struct test_sg_division *out_divs[XBUFSIZE])
  511. {
  512. struct {
  513. const struct test_sg_division *div;
  514. size_t length;
  515. } partitions[XBUFSIZE];
  516. const unsigned int ndivs = count_test_sg_divisions(divs);
  517. unsigned int len_remaining = total_len;
  518. unsigned int i;
  519. BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
  520. if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
  521. return -EINVAL;
  522. /* Calculate the (div, length) pairs */
  523. tsgl->nents = 0;
  524. for (i = 0; i < ndivs; i++) {
  525. unsigned int len_this_sg =
  526. min(len_remaining,
  527. (total_len * divs[i].proportion_of_total +
  528. TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);
  529. if (len_this_sg != 0) {
  530. partitions[tsgl->nents].div = &divs[i];
  531. partitions[tsgl->nents].length = len_this_sg;
  532. tsgl->nents++;
  533. len_remaining -= len_this_sg;
  534. }
  535. }
  536. if (tsgl->nents == 0) {
  537. partitions[tsgl->nents].div = &divs[0];
  538. partitions[tsgl->nents].length = 0;
  539. tsgl->nents++;
  540. }
  541. partitions[tsgl->nents - 1].length += len_remaining;
  542. /* Set up the sgl entries and fill the data or poison */
  543. sg_init_table(tsgl->sgl, tsgl->nents);
  544. for (i = 0; i < tsgl->nents; i++) {
  545. unsigned int offset = partitions[i].div->offset;
  546. void *addr;
  547. if (partitions[i].div->offset_relative_to_alignmask)
  548. offset += alignmask;
  549. while (offset + partitions[i].length + TESTMGR_POISON_LEN >
  550. 2 * PAGE_SIZE) {
  551. if (WARN_ON(offset <= 0))
  552. return -EINVAL;
  553. offset /= 2;
  554. }
  555. addr = &tsgl->bufs[i][offset];
  556. sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);
  557. if (out_divs)
  558. out_divs[i] = partitions[i].div;
  559. if (data) {
  560. size_t copy_len, copied;
  561. copy_len = min(partitions[i].length, data->count);
  562. copied = copy_from_iter(addr, copy_len, data);
  563. if (WARN_ON(copied != copy_len))
  564. return -EINVAL;
  565. testmgr_poison(addr + copy_len, partitions[i].length +
  566. TESTMGR_POISON_LEN - copy_len);
  567. } else {
  568. testmgr_poison(addr, partitions[i].length +
  569. TESTMGR_POISON_LEN);
  570. }
  571. }
  572. sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
  573. tsgl->sgl_ptr = tsgl->sgl;
  574. memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
  575. return 0;
  576. }
  577. /*
  578. * Verify that a scatterlist crypto operation produced the correct output.
  579. *
  580. * @tsgl: scatterlist containing the actual output
  581. * @expected_output: buffer containing the expected output
  582. * @len_to_check: length of @expected_output in bytes
  583. * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
  584. * @check_poison: verify that the poison bytes after each chunk are intact?
  585. *
  586. * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
  587. */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		if (unchecked_prefix_len) {
			/* Skip whole entries covered by the ignored prefix. */
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			/* Partial overlap: advance into this entry. */
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		/* Don't read past the end of the expected output. */
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		/* Overrun check: the poison bytes after the data must be intact. */
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	/* The scatterlist must have been long enough to hold the whole output. */
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}
  623. static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
  624. {
  625. unsigned int i;
  626. for (i = 0; i < tsgl->nents; i++) {
  627. if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
  628. return true;
  629. if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
  630. return true;
  631. if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
  632. return true;
  633. }
  634. return false;
  635. }
/* Paired source and destination scatterlists for skcipher/AEAD tests. */
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};
  640. static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
  641. {
  642. struct cipher_test_sglists *tsgls;
  643. tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
  644. if (!tsgls)
  645. return NULL;
  646. if (init_test_sglist(&tsgls->src) != 0)
  647. goto fail_kfree;
  648. if (init_test_sglist(&tsgls->dst) != 0)
  649. goto fail_destroy_src;
  650. return tsgls;
  651. fail_destroy_src:
  652. destroy_test_sglist(&tsgls->src);
  653. fail_kfree:
  654. kfree(tsgls);
  655. return NULL;
  656. }
  657. static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
  658. {
  659. if (tsgls) {
  660. destroy_test_sglist(&tsgls->src);
  661. destroy_test_sglist(&tsgls->dst);
  662. kfree(tsgls);
  663. }
  664. }
  665. /* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, ITER_SOURCE, inputs, nr_inputs, src_total_len);
	/*
	 * For in-place modes the source buffer doubles as the destination, so
	 * it must be sized for the larger of the two lengths.
	 */
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace_mode != OUT_OF_PLACE ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	/*
	 * In-place crypto operations can use the same scatterlist for both the
	 * source and destination (req->src == req->dst), or can use separate
	 * scatterlists (req->src != req->dst) which point to the same
	 * underlying memory.  Make sure to test both cases.
	 */
	if (cfg->inplace_mode == INPLACE_ONE_SGLIST) {
		/* dst aliases src: one shared sglist */
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	if (cfg->inplace_mode == INPLACE_TWO_SGLISTS) {
		/*
		 * For now we keep it simple and only test the case where the
		 * two scatterlists have identical entries, rather than
		 * different entries that split up the same memory differently.
		 */
		memcpy(tsgls->dst.sgl, tsgls->src.sgl,
		       tsgls->src.nents * sizeof(tsgls->src.sgl[0]));
		memcpy(tsgls->dst.sgl_saved, tsgls->src.sgl,
		       tsgls->src.nents * sizeof(tsgls->src.sgl[0]));
		tsgls->dst.sgl_ptr = tsgls->dst.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	/* Out of place */
	/* dst_divs defaults to src_divs when unset (all-zero). */
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
  715. /*
  716. * Support for testing passing a misaligned key to setkey():
  717. *
  718. * If cfg->key_offset is set, copy the key into a new buffer at that offset,
  719. * optionally adding alignmask. Else, just use the key directly.
  720. */
  721. static int prepare_keybuf(const u8 *key, unsigned int ksize,
  722. const struct testvec_config *cfg,
  723. unsigned int alignmask,
  724. const u8 **keybuf_ret, const u8 **keyptr_ret)
  725. {
  726. unsigned int key_offset = cfg->key_offset;
  727. u8 *keybuf = NULL, *keyptr = (u8 *)key;
  728. if (key_offset != 0) {
  729. if (cfg->key_offset_relative_to_alignmask)
  730. key_offset += alignmask;
  731. keybuf = kmalloc(key_offset + ksize, GFP_KERNEL);
  732. if (!keybuf)
  733. return -ENOMEM;
  734. keyptr = keybuf + key_offset;
  735. memcpy(keyptr, key, ksize);
  736. }
  737. *keybuf_ret = keybuf;
  738. *keyptr_ret = keyptr;
  739. return 0;
  740. }
/*
 * Like setkey_f(tfm, key, ksize), but sometimes misalign the key: if
 * cfg->key_offset is set, the key is first copied into a temporary buffer at
 * that offset via prepare_keybuf().  Evaluates to the error code from
 * prepare_keybuf() or setkey_f().
 */
#define do_setkey(setkey_f, tfm, key, ksize, cfg, alignmask)		\
({									\
	const u8 *keybuf, *keyptr;					\
	int err;							\
									\
	err = prepare_keybuf((key), (ksize), (cfg), (alignmask),	\
			     &keybuf, &keyptr);				\
	if (err == 0) {							\
		err = setkey_f((tfm), keyptr, (ksize));			\
		kfree(keybuf);						\
	}								\
	err;								\
})
  755. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  756. /*
  757. * The fuzz tests use prandom instead of the normal Linux RNG since they don't
  758. * need cryptographically secure random numbers. This greatly improves the
  759. * performance of these tests, especially if they are run before the Linux RNG
  760. * has been initialized or if they are run on a lockdep-enabled kernel.
  761. */
/* Seed the fuzz-test RNG from the system random number generator. */
static inline void init_rnd_state(struct rnd_state *rng)
{
	prandom_seed_state(rng, get_random_u64());
}
/* Return a pseudorandom byte (intentionally truncates the u32 to u8). */
static inline u8 prandom_u8(struct rnd_state *rng)
{
	return prandom_u32_state(rng);
}
/* Return a pseudorandom value in [0, ceil). */
static inline u32 prandom_u32_below(struct rnd_state *rng, u32 ceil)
{
	/*
	 * This is slightly biased for non-power-of-2 values of 'ceil', but this
	 * isn't important here.
	 */
	return prandom_u32_state(rng) % ceil;
}
/* Return a pseudorandom boolean. */
static inline bool prandom_bool(struct rnd_state *rng)
{
	return prandom_u32_below(rng, 2);
}
  782. static inline u32 prandom_u32_inclusive(struct rnd_state *rng,
  783. u32 floor, u32 ceil)
  784. {
  785. return floor + prandom_u32_below(rng, ceil - floor + 1);
  786. }
  787. /* Generate a random length in range [0, max_len], but prefer smaller values */
  788. static unsigned int generate_random_length(struct rnd_state *rng,
  789. unsigned int max_len)
  790. {
  791. unsigned int len = prandom_u32_below(rng, max_len + 1);
  792. switch (prandom_u32_below(rng, 4)) {
  793. case 0:
  794. return len % 64;
  795. case 1:
  796. return len % 256;
  797. case 2:
  798. return len % 1024;
  799. default:
  800. return len;
  801. }
  802. }
  803. /* Flip a random bit in the given nonempty data buffer */
  804. static void flip_random_bit(struct rnd_state *rng, u8 *buf, size_t size)
  805. {
  806. size_t bitpos;
  807. bitpos = prandom_u32_below(rng, size * 8);
  808. buf[bitpos / 8] ^= 1 << (bitpos % 8);
  809. }
  810. /* Flip a random byte in the given nonempty data buffer */
  811. static void flip_random_byte(struct rnd_state *rng, u8 *buf, size_t size)
  812. {
  813. buf[prandom_u32_below(rng, size)] ^= 0xff;
  814. }
  815. /* Sometimes make some random changes to the given nonempty data buffer */
static void mutate_buffer(struct rnd_state *rng, u8 *buf, size_t size)
{
	size_t num_flips;
	size_t i;

	/* Sometimes flip some bits */
	if (prandom_u32_below(rng, 4) == 0) {
		/* 1 << below(8) gives 1..128 flips, capped at the bit count */
		num_flips = min_t(size_t, 1 << prandom_u32_below(rng, 8),
				  size * 8);
		for (i = 0; i < num_flips; i++)
			flip_random_bit(rng, buf, size);
	}

	/* Sometimes flip some bytes */
	if (prandom_u32_below(rng, 4) == 0) {
		num_flips = min_t(size_t, 1 << prandom_u32_below(rng, 8), size);
		for (i = 0; i < num_flips; i++)
			flip_random_byte(rng, buf, size);
	}
}
  834. /* Randomly generate 'count' bytes, but sometimes make them "interesting" */
static void generate_random_bytes(struct rnd_state *rng, u8 *buf, size_t count)
{
	u8 b;
	u8 increment;
	size_t i;

	if (count == 0)
		return;

	switch (prandom_u32_below(rng, 8)) { /* Choose a generation strategy */
	case 0:
	case 1:
		/* All the same byte, plus optional mutations */
		switch (prandom_u32_below(rng, 4)) {
		case 0:
			b = 0x00;
			break;
		case 1:
			b = 0xff;
			break;
		default:
			b = prandom_u8(rng);
			break;
		}
		memset(buf, b, count);
		mutate_buffer(rng, buf, count);
		break;
	case 2:
		/* Ascending or descending bytes, plus optional mutations */
		/* (e.g. increment 0xff yields a descending sequence mod 256) */
		increment = prandom_u8(rng);
		b = prandom_u8(rng);
		for (i = 0; i < count; i++, b += increment)
			buf[i] = b;
		mutate_buffer(rng, buf, count);
		break;
	default:
		/* Fully random bytes */
		prandom_bytes_state(rng, buf, count);
	}
}
/*
 * Randomly generate scatterlist divisions summing to TEST_SG_TOTAL, appending
 * a human-readable description of each to the name string at @p (bounded by
 * @end).  @gen_flushes enables hash flush/reimport types; @req_flags gates
 * nosimd (incompatible with CRYPTO_TFM_REQ_MAY_SLEEP).  Returns the updated
 * position in the name buffer.
 */
static char *generate_random_sgl_divisions(struct rnd_state *rng,
					   struct test_sg_division *divs,
					   size_t max_divs, char *p, char *end,
					   bool gen_flushes, u32 req_flags)
{
	struct test_sg_division *div = divs;
	unsigned int remaining = TEST_SG_TOTAL;

	do {
		unsigned int this_len;
		const char *flushtype_str;

		/* The last slot must absorb everything that remains. */
		if (div == &divs[max_divs - 1] || prandom_bool(rng))
			this_len = remaining;
		else
			this_len = prandom_u32_inclusive(rng, 1, remaining);
		div->proportion_of_total = this_len;

		/* Bias offsets toward page-boundary-crossing and small values. */
		if (prandom_u32_below(rng, 4) == 0)
			div->offset = prandom_u32_inclusive(rng,
							    PAGE_SIZE - 128,
							    PAGE_SIZE - 1);
		else if (prandom_bool(rng))
			div->offset = prandom_u32_below(rng, 32);
		else
			div->offset = prandom_u32_below(rng, PAGE_SIZE);
		if (prandom_u32_below(rng, 8) == 0)
			div->offset_relative_to_alignmask = true;

		div->flush_type = FLUSH_TYPE_NONE;
		if (gen_flushes) {
			switch (prandom_u32_below(rng, 4)) {
			case 0:
				div->flush_type = FLUSH_TYPE_REIMPORT;
				break;
			case 1:
				div->flush_type = FLUSH_TYPE_FLUSH;
				break;
			}
		}

		/* nosimd only applies when there's a pending update to do. */
		if (div->flush_type != FLUSH_TYPE_NONE &&
		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
		    prandom_bool(rng))
			div->nosimd = true;

		switch (div->flush_type) {
		case FLUSH_TYPE_FLUSH:
			if (div->nosimd)
				flushtype_str = "<flush,nosimd>";
			else
				flushtype_str = "<flush>";
			break;
		case FLUSH_TYPE_REIMPORT:
			if (div->nosimd)
				flushtype_str = "<reimport,nosimd>";
			else
				flushtype_str = "<reimport>";
			break;
		default:
			flushtype_str = "";
			break;
		}

		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
			       this_len / 100, this_len % 100,
			       div->offset_relative_to_alignmask ?
					"alignmask" : "",
			       div->offset, this_len == remaining ? "" : ", ");
		remaining -= this_len;
		div++;
	} while (remaining);

	return p;
}
  941. /* Generate a random testvec_config for fuzz testing */
static void generate_random_testvec_config(struct rnd_state *rng,
					   struct testvec_config *cfg,
					   char *name, size_t max_namelen)
{
	char *p = name;
	char * const end = name + max_namelen;

	memset(cfg, 0, sizeof(*cfg));

	/* The generated name doubles as the config's description. */
	cfg->name = name;

	p += scnprintf(p, end - p, "random:");

	/* Pick the in-place mode; out-of-place is twice as likely. */
	switch (prandom_u32_below(rng, 4)) {
	case 0:
	case 1:
		cfg->inplace_mode = OUT_OF_PLACE;
		break;
	case 2:
		cfg->inplace_mode = INPLACE_ONE_SGLIST;
		p += scnprintf(p, end - p, " inplace_one_sglist");
		break;
	default:
		cfg->inplace_mode = INPLACE_TWO_SGLISTS;
		p += scnprintf(p, end - p, " inplace_two_sglists");
		break;
	}

	if (prandom_bool(rng)) {
		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
		p += scnprintf(p, end - p, " may_sleep");
	}

	/* Pick the finalization type (hashes only; ignored otherwise). */
	switch (prandom_u32_below(rng, 4)) {
	case 0:
		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
		p += scnprintf(p, end - p, " use_final");
		break;
	case 1:
		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
		p += scnprintf(p, end - p, " use_finup");
		break;
	default:
		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
		p += scnprintf(p, end - p, " use_digest");
		break;
	}

	/* nosimd is only valid for non-sleeping requests. */
	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) && prandom_bool(rng)) {
		cfg->nosimd = true;
		p += scnprintf(p, end - p, " nosimd");
	}

	p += scnprintf(p, end - p, " src_divs=[");
	/* Flushes only make sense when update() calls will happen. */
	p = generate_random_sgl_divisions(rng, cfg->src_divs,
					  ARRAY_SIZE(cfg->src_divs), p, end,
					  (cfg->finalization_type !=
					   FINALIZATION_TYPE_DIGEST),
					  cfg->req_flags);
	p += scnprintf(p, end - p, "]");

	/* Separate dst divisions only apply to out-of-place operation. */
	if (cfg->inplace_mode == OUT_OF_PLACE && prandom_bool(rng)) {
		p += scnprintf(p, end - p, " dst_divs=[");
		p = generate_random_sgl_divisions(rng, cfg->dst_divs,
						  ARRAY_SIZE(cfg->dst_divs),
						  p, end, false,
						  cfg->req_flags);
		p += scnprintf(p, end - p, "]");
	}

	if (prandom_bool(rng)) {
		cfg->iv_offset = prandom_u32_inclusive(rng, 1,
						       MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
	}

	if (prandom_bool(rng)) {
		cfg->key_offset = prandom_u32_inclusive(rng, 1,
							MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " key_offset=%u", cfg->key_offset);
	}

	/* Every generated config should pass the same checks as the fixed ones. */
	WARN_ON_ONCE(!valid_testvec_config(cfg));
}
/*
 * Pin the task to the current CPU (migrate_disable()) and set the per-CPU
 * flag that tells crypto code to act as if SIMD were unusable.
 */
static void crypto_disable_simd_for_test(void)
{
	migrate_disable();
	__this_cpu_write(crypto_simd_disabled_for_test, true);
}
/* Undo crypto_disable_simd_for_test(): clear the flag, then allow migration. */
static void crypto_reenable_simd_for_test(void)
{
	__this_cpu_write(crypto_simd_disabled_for_test, false);
	migrate_enable();
}
  1024. /*
  1025. * Given an algorithm name, build the name of the generic implementation of that
  1026. * algorithm, assuming the usual naming convention. Specifically, this appends
  1027. * "-generic" to every part of the name that is not a template name. Examples:
  1028. *
  1029. * aes => aes-generic
  1030. * cbc(aes) => cbc(aes-generic)
  1031. * cts(cbc(aes)) => cts(cbc(aes-generic))
  1032. * rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
  1033. *
  1034. * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
  1035. */
static int build_generic_driver_name(const char *algname,
				     char driver_name[CRYPTO_MAX_ALG_NAME])
{
	const char *in = algname;
	char *out = driver_name;
	size_t len = strlen(algname);

	if (len >= CRYPTO_MAX_ALG_NAME)
		goto too_long;
	do {
		const char *in_saved = in;

		/* Copy one name component, stopping at a delimiter. */
		while (*in && *in != '(' && *in != ')' && *in != ',')
			*out++ = *in++;
		/*
		 * A component followed by '(' is a template name; anything
		 * else (followed by ')' , ',' or NUL) is an algorithm name
		 * and gets "-generic" appended.
		 */
		if (*in != '(' && in > in_saved) {
			len += 8;
			if (len >= CRYPTO_MAX_ALG_NAME)
				goto too_long;
			memcpy(out, "-generic", 8);
			out += 8;
		}
	} while ((*out++ = *in++) != '\0');	/* copy the delimiter too */
	return 0;

too_long:
	pr_err("alg: generic driver name for \"%s\" would be too long\n",
	       algname);
	return -ENAMETOOLONG;
}
  1062. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* No-op stub: SIMD is never artificially disabled without the extra tests. */
static void crypto_disable_simd_for_test(void)
{
}
/* No-op stub matching the crypto_disable_simd_for_test() stub above. */
static void crypto_reenable_simd_for_test(void)
{
}
  1069. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  1070. static int build_hash_sglist(struct test_sglist *tsgl,
  1071. const struct hash_testvec *vec,
  1072. const struct testvec_config *cfg,
  1073. unsigned int alignmask,
  1074. const struct test_sg_division *divs[XBUFSIZE])
  1075. {
  1076. struct kvec kv;
  1077. struct iov_iter input;
  1078. kv.iov_base = (void *)vec->plaintext;
  1079. kv.iov_len = vec->psize;
  1080. iov_iter_kvec(&input, ITER_SOURCE, &kv, 1, vec->psize);
  1081. return build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
  1082. &input, divs);
  1083. }
/*
 * Compare a computed hash against the test vector's expected digest, and
 * verify that the poison bytes just past the digest in @result are intact.
 *
 * Return: 0 on match, -EINVAL on mismatch, -EOVERFLOW on buffer overrun.
 */
static int check_hash_result(const char *type,
			     const u8 *result, unsigned int digestsize,
			     const struct hash_testvec *vec,
			     const char *vec_name,
			     const char *driver,
			     const struct testvec_config *cfg)
{
	if (memcmp(result, vec->digest, digestsize) != 0) {
		pr_err("alg: %s: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       type, driver, vec_name, cfg->name);
		return -EINVAL;
	}
	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
		pr_err("alg: %s: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
		       type, driver, vec_name, cfg->name);
		return -EOVERFLOW;
	}
	return 0;
}
/*
 * Log a failed shash operation @op (e.g. "init", "update") and pass the error
 * code through unchanged; returns 0 when @err is 0.
 */
static inline int check_shash_op(const char *op, int err,
				 const char *driver, const char *vec_name,
				 const struct testvec_config *cfg)
{
	if (err)
		pr_err("alg: shash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
		       driver, op, err, vec_name, cfg->name);
	return err;
}
/*
 * Test one hash test vector in one configuration, using the shash API.
 *
 * @vec: the test vector (key, plaintext, expected digest, expected errors)
 * @vec_name: name of the test vector, for log messages
 * @cfg: describes how the data is divided, aligned, and finalized
 * @desc: shash descriptor whose tfm is the implementation under test
 * @tsgl: scratch scatterlist used to lay out the source data
 * @hashstate: buffer for export()/import() round-trips; must have at least
 *	       statesize + TESTMGR_POISON_LEN bytes (the trailing poison is
 *	       checked to detect export() overruns)
 *
 * Returns 0 on success (including errors the vector expects), else -errno.
 */
static int test_shash_vec_cfg(const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct shash_desc *desc,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_shash *tfm = desc->tfm;
	const unsigned int alignmask = crypto_shash_alignmask(tfm);
	const unsigned int digestsize = crypto_shash_digestsize(tfm);
	const unsigned int statesize = crypto_shash_statesize(tfm);
	const char *driver = crypto_shash_driver_name(tfm);
	const struct test_sg_division *divs[XBUFSIZE];
	unsigned int i;
	/* Poison bytes past the digest catch result-buffer overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
				cfg, alignmask);
		if (err) {
			/* An error the vector expects counts as a pass */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: shash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_shash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: shash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
	if (err) {
		pr_err("alg: shash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the descriptor context and result to catch stale reads */
	testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		/*
		 * digest() is given a single virtually-contiguous buffer
		 * (sg_virt of entry 0), so configs that split the data
		 * across multiple sg entries can't be tested this way.
		 */
		if (tsgl->nents != 1)
			return 0;
		if (cfg->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_digest(desc, sg_virt(&tsgl->sgl[0]),
					  tsgl->sgl[0].length, result);
		if (cfg->nosimd)
			crypto_reenable_simd_for_test();
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: shash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: shash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_init(desc);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("init", err, driver, vec_name, cfg);
	if (err)
		return err;

	for (i = 0; i < tsgl->nents; i++) {
		/* The last division may be hashed with finup() instead */
		if (i + 1 == tsgl->nents &&
		    cfg->finalization_type == FINALIZATION_TYPE_FINUP) {
			if (divs[i]->nosimd)
				crypto_disable_simd_for_test();
			err = crypto_shash_finup(desc, sg_virt(&tsgl->sgl[i]),
						 tsgl->sgl[i].length, result);
			if (divs[i]->nosimd)
				crypto_reenable_simd_for_test();
			err = check_shash_op("finup", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			goto result_ready;
		}
		if (divs[i]->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_update(desc, sg_virt(&tsgl->sgl[i]),
					  tsgl->sgl[i].length);
		if (divs[i]->nosimd)
			crypto_reenable_simd_for_test();
		err = check_shash_op("update", err, driver, vec_name, cfg);
		if (err)
			return err;
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_shash_export(desc, hashstate);
			err = check_shash_op("export", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			/* export() must not write past statesize bytes */
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: shash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}
			/* Re-poison so import() must restore everything */
			testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
			err = crypto_shash_import(desc, hashstate);
			err = check_shash_op("import", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
		}
	}

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_final(desc, result);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("final", err, driver, vec_name, cfg);
	if (err)
		return err;
result_ready:
	return check_hash_result("shash", result, digestsize, vec, vec_name,
				 driver, cfg);
}
  1251. static int do_ahash_op(int (*op)(struct ahash_request *req),
  1252. struct ahash_request *req,
  1253. struct crypto_wait *wait, bool nosimd)
  1254. {
  1255. int err;
  1256. if (nosimd)
  1257. crypto_disable_simd_for_test();
  1258. err = op(req);
  1259. if (nosimd)
  1260. crypto_reenable_simd_for_test();
  1261. return crypto_wait_req(err, wait);
  1262. }
  1263. static int check_nonfinal_ahash_op(const char *op, int err,
  1264. u8 *result, unsigned int digestsize,
  1265. const char *driver, const char *vec_name,
  1266. const struct testvec_config *cfg)
  1267. {
  1268. if (err) {
  1269. pr_err("alg: ahash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
  1270. driver, op, err, vec_name, cfg->name);
  1271. return err;
  1272. }
  1273. if (!testmgr_is_poison(result, digestsize)) {
  1274. pr_err("alg: ahash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
  1275. driver, op, vec_name, cfg->name);
  1276. return -EINVAL;
  1277. }
  1278. return 0;
  1279. }
/*
 * Test one hash test vector in one configuration, using the ahash API.
 *
 * @vec: the test vector (key, plaintext, expected digest, expected errors)
 * @vec_name: name of the test vector, for log messages
 * @cfg: describes how the data is divided, aligned, and finalized
 * @req: ahash request whose tfm is the implementation under test
 * @tsgl: scratch scatterlist used to lay out the source data
 * @hashstate: buffer for export()/import() round-trips; must have at least
 *	       statesize + TESTMGR_POISON_LEN bytes
 *
 * Returns 0 on success (including errors the vector expects), else -errno.
 */
static int test_ahash_vec_cfg(const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct ahash_request *req,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	const unsigned int alignmask = crypto_ahash_alignmask(tfm);
	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
	const unsigned int statesize = crypto_ahash_statesize(tfm);
	const char *driver = crypto_ahash_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const struct test_sg_division *divs[XBUFSIZE];
	DECLARE_CRYPTO_WAIT(wait);
	unsigned int i;
	/* Data not yet passed to update(); flushed lazily per flush_type */
	struct scatterlist *pending_sgl;
	unsigned int pending_len;
	/* Poison bytes past the digest catch result-buffer overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = do_setkey(crypto_ahash_setkey, tfm, vec->key, vec->ksize,
				cfg, alignmask);
		if (err) {
			/* An error the vector expects counts as a pass */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: ahash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_ahash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: ahash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
	if (err) {
		pr_err("alg: ahash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the request context and result to catch stale reads */
	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		ahash_request_set_callback(req, req_flags, crypto_req_done,
					   &wait);
		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: ahash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: ahash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, NULL, result, 0);
	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
	err = check_nonfinal_ahash_op("init", err, result, digestsize,
				      driver, vec_name, cfg);
	if (err)
		return err;

	pending_sgl = NULL;
	pending_len = 0;
	for (i = 0; i < tsgl->nents; i++) {
		/* Flush accumulated divisions before an export or update */
		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
		    pending_sgl != NULL) {
			/* update() with the pending data */
			ahash_request_set_callback(req, req_flags,
						   crypto_req_done, &wait);
			ahash_request_set_crypt(req, pending_sgl, result,
						pending_len);
			err = do_ahash_op(crypto_ahash_update, req, &wait,
					  divs[i]->nosimd);
			err = check_nonfinal_ahash_op("update", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			pending_sgl = NULL;
			pending_len = 0;
		}
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_ahash_export(req, hashstate);
			err = check_nonfinal_ahash_op("export", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			/* export() must not write past statesize bytes */
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: ahash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}
			/* Re-poison so import() must restore everything */
			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
			err = crypto_ahash_import(req, hashstate);
			err = check_nonfinal_ahash_op("import", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
		}
		if (pending_sgl == NULL)
			pending_sgl = &tsgl->sgl[i];
		pending_len += tsgl->sgl[i].length;
	}

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
		/* finish with update() and final() */
		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
		err = check_nonfinal_ahash_op("update", err, result, digestsize,
					      driver, vec_name, cfg);
		if (err)
			return err;
		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	} else {
		/* finish with finup() */
		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	}

result_ready:
	return check_hash_result("ahash", result, digestsize, vec, vec_name,
				 driver, cfg);
}
  1435. static int test_hash_vec_cfg(const struct hash_testvec *vec,
  1436. const char *vec_name,
  1437. const struct testvec_config *cfg,
  1438. struct ahash_request *req,
  1439. struct shash_desc *desc,
  1440. struct test_sglist *tsgl,
  1441. u8 *hashstate)
  1442. {
  1443. int err;
  1444. /*
  1445. * For algorithms implemented as "shash", most bugs will be detected by
  1446. * both the shash and ahash tests. Test the shash API first so that the
  1447. * failures involve less indirection, so are easier to debug.
  1448. */
  1449. if (desc) {
  1450. err = test_shash_vec_cfg(vec, vec_name, cfg, desc, tsgl,
  1451. hashstate);
  1452. if (err)
  1453. return err;
  1454. }
  1455. return test_ahash_vec_cfg(vec, vec_name, cfg, req, tsgl, hashstate);
  1456. }
/*
 * Test one hash test vector in all applicable configurations: first each
 * of the default configurations, then (if extra tests are enabled) a
 * series of randomly generated configurations.
 */
static int test_hash_vec(const struct hash_testvec *vec, unsigned int vec_num,
			 struct ahash_request *req, struct shash_desc *desc,
			 struct test_sglist *tsgl, u8 *hashstate)
{
	char vec_name[16];
	unsigned int i;
	int err;

	sprintf(vec_name, "%u", vec_num);

	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
		err = test_hash_vec_cfg(vec, vec_name,
					&default_hash_testvec_configs[i],
					req, desc, tsgl, hashstate);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	if (!noextratests) {
		struct rnd_state rng;
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		init_rnd_state(&rng);

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&rng, &cfg, cfgname,
						       sizeof(cfgname));
			err = test_hash_vec_cfg(vec, vec_name, &cfg,
						req, desc, tsgl, hashstate);
			if (err)
				return err;
			/* Fuzzing can take a while; don't hog the CPU */
			cond_resched();
		}
	}
#endif
	return 0;
}
  1491. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Generate a hash test vector from the given implementation.
 * Assumes the buffers in 'vec' were already allocated.
 *
 * The plaintext, key length, and key bytes are drawn from @rng; the
 * expected digest (or expected setkey/digest error) is computed with
 * @desc's tfm, which is presumed to be a trusted (generic) implementation.
 * A human-readable description of the vector is written to @name.
 */
static void generate_random_hash_testvec(struct rnd_state *rng,
					 struct shash_desc *desc,
					 struct hash_testvec *vec,
					 unsigned int maxkeysize,
					 unsigned int maxdatasize,
					 char *name, size_t max_namelen)
{
	/* Data */
	vec->psize = generate_random_length(rng, maxdatasize);
	generate_random_bytes(rng, (u8 *)vec->plaintext, vec->psize);

	/*
	 * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
	 * If algorithm is unkeyed, then maxkeysize == 0 and set ksize = 0.
	 */
	vec->setkey_error = 0;
	vec->ksize = 0;
	if (maxkeysize) {
		vec->ksize = maxkeysize;
		/* 1-in-4 chance of a shorter-than-maximum key */
		if (prandom_u32_below(rng, 4) == 0)
			vec->ksize = prandom_u32_inclusive(rng, 1, maxkeysize);
		generate_random_bytes(rng, (u8 *)vec->key, vec->ksize);

		vec->setkey_error = crypto_shash_setkey(desc->tfm, vec->key,
							vec->ksize);
		/* If the key couldn't be set, no need to continue to digest. */
		if (vec->setkey_error)
			goto done;
	}

	/* Digest */
	vec->digest_error = crypto_shash_digest(desc, vec->plaintext,
						vec->psize, (u8 *)vec->digest);
done:
	snprintf(name, max_namelen, "\"random: psize=%u ksize=%u\"",
		 vec->psize, vec->ksize);
}
/*
 * Test the hash algorithm represented by @req against the corresponding generic
 * implementation, if one is available.
 *
 * Random test vectors are generated with the generic implementation and
 * then replayed against the implementation under test in random
 * configurations.  Returns 0 on success (or if the comparison is skipped
 * because the generic implementation is unavailable or is itself the
 * implementation under test), else a negative errno.
 */
static int test_hash_vs_generic_impl(const char *generic_driver,
				     unsigned int maxkeysize,
				     struct ahash_request *req,
				     struct shash_desc *desc,
				     struct test_sglist *tsgl,
				     u8 *hashstate)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
	const unsigned int blocksize = crypto_ahash_blocksize(tfm);
	/* Leave room for the trailing poison in the two-page data buffer */
	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	const char *algname = crypto_hash_alg_common(tfm)->base.cra_name;
	const char *driver = crypto_ahash_driver_name(tfm);
	struct rnd_state rng;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_shash *generic_tfm = NULL;
	struct shash_desc *generic_desc = NULL;
	unsigned int i;
	struct hash_testvec vec = { 0 };
	char vec_name[64];
	struct testvec_config *cfg;
	char cfgname[TESTVEC_CONFIG_NAMELEN];
	int err;

	if (noextratests)
		return 0;

	init_rnd_state(&rng);

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_shash(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		if (err == -ENOENT) {
			/* No generic impl available: skip, don't fail */
			pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
	if (!cfg) {
		err = -ENOMEM;
		goto out;
	}

	generic_desc = kzalloc(sizeof(*desc) +
			       crypto_shash_descsize(generic_tfm), GFP_KERNEL);
	if (!generic_desc) {
		err = -ENOMEM;
		goto out;
	}
	generic_desc->tfm = generic_tfm;

	/* Check the algorithm properties for consistency. */

	if (digestsize != crypto_shash_digestsize(generic_tfm)) {
		pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, digestsize,
		       crypto_shash_digestsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (blocksize != crypto_shash_blocksize(generic_tfm)) {
		pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, blocksize, crypto_shash_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */

	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
	vec.plaintext = kmalloc(maxdatasize, GFP_KERNEL);
	vec.digest = kmalloc(digestsize, GFP_KERNEL);
	if (!vec.key || !vec.plaintext || !vec.digest) {
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_hash_testvec(&rng, generic_desc, &vec,
					     maxkeysize, maxdatasize,
					     vec_name, sizeof(vec_name));
		generate_random_testvec_config(&rng, cfg, cfgname,
					       sizeof(cfgname));

		err = test_hash_vec_cfg(&vec, vec_name, cfg,
					req, desc, tsgl, hashstate);
		if (err)
			goto out;
		cond_resched();
	}
	err = 0;
out:
	kfree(cfg);
	kfree(vec.key);
	kfree(vec.plaintext);
	kfree(vec.digest);
	crypto_free_shash(generic_tfm);
	/* The descriptor may hold key-derived state; wipe it on free */
	kfree_sensitive(generic_desc);
	return err;
}
  1639. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
static int test_hash_vs_generic_impl(const char *generic_driver,
				     unsigned int maxkeysize,
				     struct ahash_request *req,
				     struct shash_desc *desc,
				     struct test_sglist *tsgl,
				     u8 *hashstate)
{
	/* Comparison fuzz tests are compiled out; nothing to do. */
	return 0;
}
  1649. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  1650. static int alloc_shash(const char *driver, u32 type, u32 mask,
  1651. struct crypto_shash **tfm_ret,
  1652. struct shash_desc **desc_ret)
  1653. {
  1654. struct crypto_shash *tfm;
  1655. struct shash_desc *desc;
  1656. tfm = crypto_alloc_shash(driver, type, mask);
  1657. if (IS_ERR(tfm)) {
  1658. if (PTR_ERR(tfm) == -ENOENT) {
  1659. /*
  1660. * This algorithm is only available through the ahash
  1661. * API, not the shash API, so skip the shash tests.
  1662. */
  1663. return 0;
  1664. }
  1665. pr_err("alg: hash: failed to allocate shash transform for %s: %ld\n",
  1666. driver, PTR_ERR(tfm));
  1667. return PTR_ERR(tfm);
  1668. }
  1669. desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
  1670. if (!desc) {
  1671. crypto_free_shash(tfm);
  1672. return -ENOMEM;
  1673. }
  1674. desc->tfm = tfm;
  1675. *tfm_ret = tfm;
  1676. *desc_ret = desc;
  1677. return 0;
  1678. }
/*
 * Run all hash self-tests for one group of test vectors: allocate the
 * transforms and scratch buffers, test every vector in @vecs, then run
 * the comparison fuzz tests against the generic implementation.
 *
 * @vecs/@num_vecs: the test vectors to run
 * @driver: driver name of the implementation under test
 * @type/@mask: crypto API lookup type/mask
 * @generic_driver: name of the generic implementation to fuzz against,
 *		    or NULL to derive it from the algorithm name
 * @maxkeysize: largest key size among the keyed vectors (0 if unkeyed)
 */
static int __alg_test_hash(const struct hash_testvec *vecs,
			   unsigned int num_vecs, const char *driver,
			   u32 type, u32 mask,
			   const char *generic_driver, unsigned int maxkeysize)
{
	struct crypto_ahash *atfm = NULL;
	struct ahash_request *req = NULL;
	struct crypto_shash *stfm = NULL;
	struct shash_desc *desc = NULL;
	struct test_sglist *tsgl = NULL;
	u8 *hashstate = NULL;
	unsigned int statesize;
	unsigned int i;
	int err;

	/*
	 * Always test the ahash API.  This works regardless of whether the
	 * algorithm is implemented as ahash or shash.
	 */

	atfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(atfm)) {
		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(atfm));
		return PTR_ERR(atfm);
	}
	/* Use the resolved driver name in all subsequent log messages */
	driver = crypto_ahash_driver_name(atfm);

	req = ahash_request_alloc(atfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	/*
	 * If available also test the shash API, to cover corner cases that may
	 * be missed by testing the ahash API only.
	 */
	err = alloc_shash(driver, type, mask, &stfm, &desc);
	if (err)
		goto out;

	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
	if (!tsgl || init_test_sglist(tsgl) != 0) {
		pr_err("alg: hash: failed to allocate test buffers for %s\n",
		       driver);
		kfree(tsgl);
		tsgl = NULL;
		err = -ENOMEM;
		goto out;
	}

	/* One state buffer sized for whichever API reports the larger state */
	statesize = crypto_ahash_statesize(atfm);
	if (stfm)
		statesize = max(statesize, crypto_shash_statesize(stfm));
	hashstate = kmalloc(statesize + TESTMGR_POISON_LEN, GFP_KERNEL);
	if (!hashstate) {
		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < num_vecs; i++) {
		/* Skip vectors that are disallowed in FIPS mode */
		if (fips_enabled && vecs[i].fips_skip)
			continue;

		err = test_hash_vec(&vecs[i], i, req, desc, tsgl, hashstate);
		if (err)
			goto out;
		cond_resched();
	}
	err = test_hash_vs_generic_impl(generic_driver, maxkeysize, req,
					desc, tsgl, hashstate);
out:
	kfree(hashstate);
	if (tsgl) {
		destroy_test_sglist(tsgl);
		kfree(tsgl);
	}
	kfree(desc);
	crypto_free_shash(stfm);
	ahash_request_free(req);
	crypto_free_ahash(atfm);
	return err;
}
  1759. static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
  1760. u32 type, u32 mask)
  1761. {
  1762. const struct hash_testvec *template = desc->suite.hash.vecs;
  1763. unsigned int tcount = desc->suite.hash.count;
  1764. unsigned int nr_unkeyed, nr_keyed;
  1765. unsigned int maxkeysize = 0;
  1766. int err;
  1767. /*
  1768. * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
  1769. * first, before setting a key on the tfm. To make this easier, we
  1770. * require that the unkeyed test vectors (if any) are listed first.
  1771. */
  1772. for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
  1773. if (template[nr_unkeyed].ksize)
  1774. break;
  1775. }
  1776. for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
  1777. if (!template[nr_unkeyed + nr_keyed].ksize) {
  1778. pr_err("alg: hash: test vectors for %s out of order, "
  1779. "unkeyed ones must come first\n", desc->alg);
  1780. return -EINVAL;
  1781. }
  1782. maxkeysize = max_t(unsigned int, maxkeysize,
  1783. template[nr_unkeyed + nr_keyed].ksize);
  1784. }
  1785. err = 0;
  1786. if (nr_unkeyed) {
  1787. err = __alg_test_hash(template, nr_unkeyed, driver, type, mask,
  1788. desc->generic_driver, maxkeysize);
  1789. template += nr_unkeyed;
  1790. }
  1791. if (!err && nr_keyed)
  1792. err = __alg_test_hash(template, nr_keyed, driver, type, mask,
  1793. desc->generic_driver, maxkeysize);
  1794. return err;
  1795. }
/*
 * Test one AEAD test vector in one configuration, in one direction.
 *
 * @enc: nonzero to test encryption, zero to test decryption
 * @vec: the test vector (key, IV, AAD, plaintext/ciphertext, expected errors)
 * @vec_name: name of the test vector, for log messages
 * @cfg: describes how the data is divided, aligned, and placed
 * @req: AEAD request whose tfm is the implementation under test
 * @tsgls: scratch src/dst scatterlists
 *
 * Returns 0 on success (including errors the vector expects), else -errno.
 */
static int test_aead_vec_cfg(int enc, const struct aead_testvec *vec,
			     const char *vec_name,
			     const struct testvec_config *cfg,
			     struct aead_request *req,
			     struct cipher_test_sglists *tsgls)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int alignmask = crypto_aead_alignmask(tfm);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	/* Tag length is implied by ciphertext len minus plaintext len */
	const unsigned int authsize = vec->clen - vec->plen;
	const char *driver = crypto_aead_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at a config-chosen offset */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input[2];
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

	err = do_setkey(crypto_aead_setkey, tfm, vec->key, vec->klen,
			cfg, alignmask);
	if (err && err != vec->setkey_error) {
		pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_aead_get_flags(tfm));
		return err;
	}
	if (!err && vec->setkey_error) {
		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* Set the authentication tag size */
	err = crypto_aead_setauthsize(tfm, authsize);
	if (err && err != vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
		       driver, vec_name, vec->setauthsize_error, err);
		return err;
	}
	if (!err && vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setauthsize_error);
		return -EINVAL;
	}

	/* An expected setkey/setauthsize failure ends the test here */
	if (vec->setkey_error || vec->setauthsize_error)
		return 0;

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (WARN_ON(ivsize > MAX_IVLEN))
		return -EINVAL;
	if (vec->iv)
		memcpy(iv, vec->iv, ivsize);
	else
		memset(iv, 0, ivsize);

	/* Build the src/dst scatterlists */
	input[0].iov_base = (void *)vec->assoc;
	input[0].iov_len = vec->alen;
	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input[1].iov_len = enc ? vec->plen : vec->clen;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->alen + (enc ? vec->plen :
						     vec->clen),
					vec->alen + (enc ? vec->clen :
						     vec->plen),
					input, 2);
	if (err) {
		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
			       enc ? vec->plen : vec->clen, iv);
	aead_request_set_ad(req, vec->alen);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
	    req->assoclen != vec->alen ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_aead_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		/* Report exactly which field was clobbered */
		if (req->cryptlen != (enc ? vec->plen : vec->clen))
			pr_err("alg: aead: changed 'req->cryptlen'\n");
		if (req->assoclen != vec->alen)
			pr_err("alg: aead: changed 'req->assoclen'\n");
		if (req->iv != iv)
			pr_err("alg: aead: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: aead: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: aead: changed 'req->dst'\n");
		if (crypto_aead_reqtfm(req) != tfm)
			pr_err("alg: aead: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: aead: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: aead: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: aead: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for unexpected success or failure, or wrong error code */
	if ((err == 0 && vec->novrfy) ||
	    (err != vec->crypt_error && !(err == -EBADMSG && vec->novrfy))) {
		char expected_error[32];

		/*
		 * An inauthentic-input vector (novrfy) may fail with either
		 * -EBADMSG or its own crypt_error (e.g. from setkey-time
		 * checks); describe the acceptable set in the message.
		 */
		if (vec->novrfy &&
		    vec->crypt_error != 0 && vec->crypt_error != -EBADMSG)
			sprintf(expected_error, "-EBADMSG or %d",
				vec->crypt_error);
		else if (vec->novrfy)
			sprintf(expected_error, "-EBADMSG");
		else
			sprintf(expected_error, "%d", vec->crypt_error);
		if (err) {
			pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%s, actual_error=%d, cfg=\"%s\"\n",
			       driver, op, vec_name, expected_error, err,
			       cfg->name);
			return err;
		}
		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%s, cfg=\"%s\"\n",
		       driver, op, vec_name, expected_error, cfg->name);
		return -EINVAL;
	}
	if (err) /* Expectedly failed. */
		return 0;

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    enc ? vec->clen : vec->plen,
				    vec->alen,
				    enc || cfg->inplace_mode == OUT_OF_PLACE);
	if (err == -EOVERFLOW) {
		pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	return 0;
}
  1967. static int test_aead_vec(int enc, const struct aead_testvec *vec,
  1968. unsigned int vec_num, struct aead_request *req,
  1969. struct cipher_test_sglists *tsgls)
  1970. {
  1971. char vec_name[16];
  1972. unsigned int i;
  1973. int err;
  1974. if (enc && vec->novrfy)
  1975. return 0;
  1976. sprintf(vec_name, "%u", vec_num);
  1977. for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
  1978. err = test_aead_vec_cfg(enc, vec, vec_name,
  1979. &default_cipher_testvec_configs[i],
  1980. req, tsgls);
  1981. if (err)
  1982. return err;
  1983. }
  1984. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  1985. if (!noextratests) {
  1986. struct rnd_state rng;
  1987. struct testvec_config cfg;
  1988. char cfgname[TESTVEC_CONFIG_NAMELEN];
  1989. init_rnd_state(&rng);
  1990. for (i = 0; i < fuzz_iterations; i++) {
  1991. generate_random_testvec_config(&rng, &cfg, cfgname,
  1992. sizeof(cfgname));
  1993. err = test_aead_vec_cfg(enc, vec, vec_name,
  1994. &cfg, req, tsgls);
  1995. if (err)
  1996. return err;
  1997. cond_resched();
  1998. }
  1999. }
  2000. #endif
  2001. return 0;
  2002. }
  2003. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * State shared by the "extra" AEAD tests (comparison against the generic
 * implementation and inauthentic-input testing).  Allocated on the heap by
 * test_aead_extra() since it embeds large buffers.
 */
struct aead_extra_tests_ctx {
	struct rnd_state rng;		/* PRNG state used to generate tests */
	struct aead_request *req;	/* request for the tfm under test */
	struct crypto_aead *tfm;	/* the AEAD implementation under test */
	const struct alg_test_desc *test_desc;
	struct cipher_test_sglists *tsgls;
	unsigned int maxdatasize;	/* max total AAD + message length */
	unsigned int maxkeysize;	/* longest klen among the suite's vectors */

	/* Scratch test vector; its data buffers are kmalloc'ed separately. */
	struct aead_testvec vec;
	char vec_name[64];
	char cfgname[TESTVEC_CONFIG_NAMELEN];
	struct testvec_config cfg;
};
/*
 * Make at least one random change to a (ciphertext, AAD) pair.  "Ciphertext"
 * here means the full ciphertext including the authentication tag.  The
 * authentication tag (and hence also the ciphertext) is assumed to be nonempty.
 *
 * @aad_iv: true if the last @ivsize bytes of the AAD hold the IV; those bytes
 *	    are then never mutated, so the mutation stays within the parts of
 *	    the message that are actually authenticated data.
 */
static void mutate_aead_message(struct rnd_state *rng,
				struct aead_testvec *vec, bool aad_iv,
				unsigned int ivsize)
{
	const unsigned int aad_tail_size = aad_iv ? ivsize : 0;
	const unsigned int authsize = vec->clen - vec->plen;

	if (prandom_bool(rng) && vec->alen > aad_tail_size) {
		/* Mutate the AAD */
		flip_random_bit(rng, (u8 *)vec->assoc,
				vec->alen - aad_tail_size);
		/* Half the time, stop after mutating only the AAD. */
		if (prandom_bool(rng))
			return;
	}
	if (prandom_bool(rng)) {
		/* Mutate auth tag (assuming it's at the end of ciphertext) */
		flip_random_bit(rng, (u8 *)vec->ctext + vec->plen, authsize);
	} else {
		/* Mutate any part of the ciphertext */
		flip_random_bit(rng, (u8 *)vec->ctext, vec->clen);
	}
}
/*
 * Minimum authentication tag size in bytes at which we assume that we can
 * reliably generate inauthentic messages, i.e. not generate an authentic
 * message by chance.  (With an 8-byte tag, the chance of a random mutation
 * still verifying is about 2^-64, which is negligible for test purposes.)
 */
#define MIN_COLLISION_FREE_AUTHSIZE 8
/*
 * Generate the (AAD, ciphertext) part of the AEAD test vector 'vec', whose
 * lengths (alen, plen, clen) and key/IV have already been chosen by the
 * caller.  The message is produced either by encrypting a random plaintext
 * with the implementation behind 'req', or — for inauthentic vectors — by
 * random generation or by mutating an authentic message.
 *
 * On exit, vec->novrfy is set if the vector is inauthentic, and
 * vec->crypt_error holds any error from the encryption step.
 */
static void generate_aead_message(struct rnd_state *rng,
				  struct aead_request *req,
				  const struct aead_test_suite *suite,
				  struct aead_testvec *vec,
				  bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int authsize = vec->clen - vec->plen;
	/*
	 * Only attempt an inauthentic message when the tag is long enough
	 * that a chance collision is negligible; otherwise always authentic.
	 */
	const bool inauthentic = (authsize >= MIN_COLLISION_FREE_AUTHSIZE) &&
				 (prefer_inauthentic ||
				  prandom_u32_below(rng, 4) == 0);

	/* Generate the AAD. */
	generate_random_bytes(rng, (u8 *)vec->assoc, vec->alen);
	if (suite->aad_iv && vec->alen >= ivsize)
		/* Avoid implementation-defined behavior. */
		memcpy((u8 *)vec->assoc + vec->alen - ivsize, vec->iv, ivsize);

	if (inauthentic && prandom_bool(rng)) {
		/* Generate a random ciphertext. */
		generate_random_bytes(rng, (u8 *)vec->ctext, vec->clen);
	} else {
		int i = 0;
		struct scatterlist src[2], dst;
		u8 iv[MAX_IVLEN];
		DECLARE_CRYPTO_WAIT(wait);

		/* Generate a random plaintext and encrypt it. */
		sg_init_table(src, 2);
		if (vec->alen)
			sg_set_buf(&src[i++], vec->assoc, vec->alen);
		if (vec->plen) {
			generate_random_bytes(rng, (u8 *)vec->ptext, vec->plen);
			sg_set_buf(&src[i++], vec->ptext, vec->plen);
		}
		/* dst holds copied AAD followed by the ciphertext+tag. */
		sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
		/* Use a copy of the IV; the algorithm may modify it. */
		memcpy(iv, vec->iv, ivsize);
		aead_request_set_callback(req, 0, crypto_req_done, &wait);
		aead_request_set_crypt(req, src, &dst, vec->plen, iv);
		aead_request_set_ad(req, vec->alen);
		vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req),
						   &wait);
		/* If encryption failed, we're done. */
		if (vec->crypt_error != 0)
			return;
		/* Strip the copied AAD off the front of the output buffer. */
		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
		if (!inauthentic)
			return;
		/*
		 * Mutate the authentic (ciphertext, AAD) pair to get an
		 * inauthentic one.
		 */
		mutate_aead_message(rng, vec, suite->aad_iv, ivsize);
	}
	vec->novrfy = 1;
	if (suite->einval_allowed)
		vec->crypt_error = -EINVAL;
}
/*
 * Generate an AEAD test vector 'vec' using the implementation specified by
 * 'req'.  The buffers in 'vec' must already be allocated.
 *
 * If 'prefer_inauthentic' is true, then this function will generate inauthentic
 * test vectors (i.e. vectors with 'vec->novrfy=1') more often.
 *
 * A human-readable description of the vector is written to 'name'.
 */
static void generate_random_aead_testvec(struct rnd_state *rng,
					 struct aead_request *req,
					 struct aead_testvec *vec,
					 const struct aead_test_suite *suite,
					 unsigned int maxkeysize,
					 unsigned int maxdatasize,
					 char *name, size_t max_namelen,
					 bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int maxauthsize = crypto_aead_maxauthsize(tfm);
	unsigned int authsize;
	unsigned int total_len;

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32_below(rng, 4) == 0)
		vec->klen = prandom_u32_below(rng, maxkeysize + 1);
	generate_random_bytes(rng, (u8 *)vec->key, vec->klen);
	/* Record (rather than fail on) setkey errors; they are tested too. */
	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes(rng, (u8 *)vec->iv, ivsize);

	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
	authsize = maxauthsize;
	if (prandom_u32_below(rng, 4) == 0)
		authsize = prandom_u32_below(rng, maxauthsize + 1);
	/* Short tags can't reliably yield inauthentic messages; lengthen. */
	if (prefer_inauthentic && authsize < MIN_COLLISION_FREE_AUTHSIZE)
		authsize = MIN_COLLISION_FREE_AUTHSIZE;
	if (WARN_ON(authsize > maxdatasize))
		authsize = maxdatasize;
	/* Reserve room for the tag within the data size budget. */
	maxdatasize -= authsize;
	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);

	/* AAD, plaintext, and ciphertext lengths */
	total_len = generate_random_length(rng, maxdatasize);
	if (prandom_u32_below(rng, 4) == 0)
		vec->alen = 0;
	else
		vec->alen = generate_random_length(rng, total_len);
	vec->plen = total_len - vec->alen;
	vec->clen = vec->plen + authsize;

	/*
	 * Generate the AAD, plaintext, and ciphertext.  Not applicable if the
	 * key or the authentication tag size couldn't be set.
	 */
	vec->novrfy = 0;
	vec->crypt_error = 0;
	if (vec->setkey_error == 0 && vec->setauthsize_error == 0)
		generate_aead_message(rng, req, suite, vec, prefer_inauthentic);
	snprintf(name, max_namelen,
		 "\"random: alen=%u plen=%u authsize=%u klen=%u novrfy=%d\"",
		 vec->alen, vec->plen, authsize, vec->klen, vec->novrfy);
}
  2164. static void try_to_generate_inauthentic_testvec(
  2165. struct aead_extra_tests_ctx *ctx)
  2166. {
  2167. int i;
  2168. for (i = 0; i < 10; i++) {
  2169. generate_random_aead_testvec(&ctx->rng, ctx->req, &ctx->vec,
  2170. &ctx->test_desc->suite.aead,
  2171. ctx->maxkeysize, ctx->maxdatasize,
  2172. ctx->vec_name,
  2173. sizeof(ctx->vec_name), true);
  2174. if (ctx->vec.novrfy)
  2175. return;
  2176. }
  2177. }
  2178. /*
  2179. * Generate inauthentic test vectors (i.e. ciphertext, AAD pairs that aren't the
  2180. * result of an encryption with the key) and verify that decryption fails.
  2181. */
  2182. static int test_aead_inauthentic_inputs(struct aead_extra_tests_ctx *ctx)
  2183. {
  2184. unsigned int i;
  2185. int err;
  2186. for (i = 0; i < fuzz_iterations * 8; i++) {
  2187. /*
  2188. * Since this part of the tests isn't comparing the
  2189. * implementation to another, there's no point in testing any
  2190. * test vectors other than inauthentic ones (vec.novrfy=1) here.
  2191. *
  2192. * If we're having trouble generating such a test vector, e.g.
  2193. * if the algorithm keeps rejecting the generated keys, don't
  2194. * retry forever; just continue on.
  2195. */
  2196. try_to_generate_inauthentic_testvec(ctx);
  2197. if (ctx->vec.novrfy) {
  2198. generate_random_testvec_config(&ctx->rng, &ctx->cfg,
  2199. ctx->cfgname,
  2200. sizeof(ctx->cfgname));
  2201. err = test_aead_vec_cfg(DECRYPT, &ctx->vec,
  2202. ctx->vec_name, &ctx->cfg,
  2203. ctx->req, ctx->tsgls);
  2204. if (err)
  2205. return err;
  2206. }
  2207. cond_resched();
  2208. }
  2209. return 0;
  2210. }
/*
 * Test the AEAD algorithm against the corresponding generic implementation, if
 * one is available.
 *
 * Allocates the generic transform, verifies that its basic properties
 * (maxauthsize, ivsize, blocksize) match the implementation under test, then
 * fuzzes: test vectors are generated *with the generic implementation* and
 * replayed against the implementation under test.
 *
 * Returns 0 on success (including when the generic impl is unavailable),
 * or a negative error code.
 */
static int test_aead_vs_generic_impl(struct aead_extra_tests_ctx *ctx)
{
	struct crypto_aead *tfm = ctx->tfm;
	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
	const char *driver = crypto_aead_driver_name(tfm);
	const char *generic_driver = ctx->test_desc->generic_driver;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_aead *generic_tfm = NULL;
	struct aead_request *generic_req = NULL;
	unsigned int i;
	int err;

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		/* A missing generic impl only skips the comparison tests. */
		if (err == -ENOENT) {
			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */

	if (crypto_aead_maxauthsize(tfm) !=
	    crypto_aead_maxauthsize(generic_tfm)) {
		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_maxauthsize(tfm),
		       crypto_aead_maxauthsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_ivsize(tfm) != crypto_aead_ivsize(generic_tfm)) {
		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_ivsize(tfm),
		       crypto_aead_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_blocksize(tfm) != crypto_aead_blocksize(generic_tfm)) {
		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_blocksize(tfm),
		       crypto_aead_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_aead_testvec(&ctx->rng, generic_req, &ctx->vec,
					     &ctx->test_desc->suite.aead,
					     ctx->maxkeysize, ctx->maxdatasize,
					     ctx->vec_name,
					     sizeof(ctx->vec_name), false);
		generate_random_testvec_config(&ctx->rng, &ctx->cfg,
					       ctx->cfgname,
					       sizeof(ctx->cfgname));
		/* Inauthentic vectors are decryption-only. */
		if (!ctx->vec.novrfy) {
			err = test_aead_vec_cfg(ENCRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
		/* Decrypt only if encryption succeeded or vec is inauthentic. */
		if (ctx->vec.crypt_error == 0 || ctx->vec.novrfy) {
			err = test_aead_vec_cfg(DECRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
		cond_resched();
	}
	err = 0;
out:
	crypto_free_aead(generic_tfm);
	aead_request_free(generic_req);
	return err;
}
  2309. static int test_aead_extra(const struct alg_test_desc *test_desc,
  2310. struct aead_request *req,
  2311. struct cipher_test_sglists *tsgls)
  2312. {
  2313. struct aead_extra_tests_ctx *ctx;
  2314. unsigned int i;
  2315. int err;
  2316. if (noextratests)
  2317. return 0;
  2318. ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
  2319. if (!ctx)
  2320. return -ENOMEM;
  2321. init_rnd_state(&ctx->rng);
  2322. ctx->req = req;
  2323. ctx->tfm = crypto_aead_reqtfm(req);
  2324. ctx->test_desc = test_desc;
  2325. ctx->tsgls = tsgls;
  2326. ctx->maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
  2327. ctx->maxkeysize = 0;
  2328. for (i = 0; i < test_desc->suite.aead.count; i++)
  2329. ctx->maxkeysize = max_t(unsigned int, ctx->maxkeysize,
  2330. test_desc->suite.aead.vecs[i].klen);
  2331. ctx->vec.key = kmalloc(ctx->maxkeysize, GFP_KERNEL);
  2332. ctx->vec.iv = kmalloc(crypto_aead_ivsize(ctx->tfm), GFP_KERNEL);
  2333. ctx->vec.assoc = kmalloc(ctx->maxdatasize, GFP_KERNEL);
  2334. ctx->vec.ptext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
  2335. ctx->vec.ctext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
  2336. if (!ctx->vec.key || !ctx->vec.iv || !ctx->vec.assoc ||
  2337. !ctx->vec.ptext || !ctx->vec.ctext) {
  2338. err = -ENOMEM;
  2339. goto out;
  2340. }
  2341. err = test_aead_vs_generic_impl(ctx);
  2342. if (err)
  2343. goto out;
  2344. err = test_aead_inauthentic_inputs(ctx);
  2345. out:
  2346. kfree(ctx->vec.key);
  2347. kfree(ctx->vec.iv);
  2348. kfree(ctx->vec.assoc);
  2349. kfree(ctx->vec.ptext);
  2350. kfree(ctx->vec.ctext);
  2351. kfree(ctx);
  2352. return err;
  2353. }
  2354. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Stub used when the "extra" tests are compiled out: always succeeds. */
static int test_aead_extra(const struct alg_test_desc *test_desc,
			   struct aead_request *req,
			   struct cipher_test_sglists *tsgls)
{
	return 0;
}
  2361. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  2362. static int test_aead(int enc, const struct aead_test_suite *suite,
  2363. struct aead_request *req,
  2364. struct cipher_test_sglists *tsgls)
  2365. {
  2366. unsigned int i;
  2367. int err;
  2368. for (i = 0; i < suite->count; i++) {
  2369. err = test_aead_vec(enc, &suite->vecs[i], i, req, tsgls);
  2370. if (err)
  2371. return err;
  2372. cond_resched();
  2373. }
  2374. return 0;
  2375. }
  2376. static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
  2377. u32 type, u32 mask)
  2378. {
  2379. const struct aead_test_suite *suite = &desc->suite.aead;
  2380. struct crypto_aead *tfm;
  2381. struct aead_request *req = NULL;
  2382. struct cipher_test_sglists *tsgls = NULL;
  2383. int err;
  2384. if (suite->count <= 0) {
  2385. pr_err("alg: aead: empty test suite for %s\n", driver);
  2386. return -EINVAL;
  2387. }
  2388. tfm = crypto_alloc_aead(driver, type, mask);
  2389. if (IS_ERR(tfm)) {
  2390. pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
  2391. driver, PTR_ERR(tfm));
  2392. return PTR_ERR(tfm);
  2393. }
  2394. driver = crypto_aead_driver_name(tfm);
  2395. req = aead_request_alloc(tfm, GFP_KERNEL);
  2396. if (!req) {
  2397. pr_err("alg: aead: failed to allocate request for %s\n",
  2398. driver);
  2399. err = -ENOMEM;
  2400. goto out;
  2401. }
  2402. tsgls = alloc_cipher_test_sglists();
  2403. if (!tsgls) {
  2404. pr_err("alg: aead: failed to allocate test buffers for %s\n",
  2405. driver);
  2406. err = -ENOMEM;
  2407. goto out;
  2408. }
  2409. err = test_aead(ENCRYPT, suite, req, tsgls);
  2410. if (err)
  2411. goto out;
  2412. err = test_aead(DECRYPT, suite, req, tsgls);
  2413. if (err)
  2414. goto out;
  2415. err = test_aead_extra(desc, req, tsgls);
  2416. out:
  2417. free_cipher_test_sglists(tsgls);
  2418. aead_request_free(req);
  2419. crypto_free_aead(tfm);
  2420. return err;
  2421. }
  2422. static int test_cipher(struct crypto_cipher *tfm, int enc,
  2423. const struct cipher_testvec *template,
  2424. unsigned int tcount)
  2425. {
  2426. const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
  2427. unsigned int i, j, k;
  2428. char *q;
  2429. const char *e;
  2430. const char *input, *result;
  2431. void *data;
  2432. char *xbuf[XBUFSIZE];
  2433. int ret = -ENOMEM;
  2434. if (testmgr_alloc_buf(xbuf))
  2435. goto out_nobuf;
  2436. if (enc == ENCRYPT)
  2437. e = "encryption";
  2438. else
  2439. e = "decryption";
  2440. j = 0;
  2441. for (i = 0; i < tcount; i++) {
  2442. if (fips_enabled && template[i].fips_skip)
  2443. continue;
  2444. input = enc ? template[i].ptext : template[i].ctext;
  2445. result = enc ? template[i].ctext : template[i].ptext;
  2446. j++;
  2447. ret = -EINVAL;
  2448. if (WARN_ON(template[i].len > PAGE_SIZE))
  2449. goto out;
  2450. data = xbuf[0];
  2451. memcpy(data, input, template[i].len);
  2452. crypto_cipher_clear_flags(tfm, ~0);
  2453. if (template[i].wk)
  2454. crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
  2455. ret = crypto_cipher_setkey(tfm, template[i].key,
  2456. template[i].klen);
  2457. if (ret) {
  2458. if (ret == template[i].setkey_error)
  2459. continue;
  2460. pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
  2461. algo, j, template[i].setkey_error, ret,
  2462. crypto_cipher_get_flags(tfm));
  2463. goto out;
  2464. }
  2465. if (template[i].setkey_error) {
  2466. pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
  2467. algo, j, template[i].setkey_error);
  2468. ret = -EINVAL;
  2469. goto out;
  2470. }
  2471. for (k = 0; k < template[i].len;
  2472. k += crypto_cipher_blocksize(tfm)) {
  2473. if (enc)
  2474. crypto_cipher_encrypt_one(tfm, data + k,
  2475. data + k);
  2476. else
  2477. crypto_cipher_decrypt_one(tfm, data + k,
  2478. data + k);
  2479. }
  2480. q = data;
  2481. if (memcmp(q, result, template[i].len)) {
  2482. printk(KERN_ERR "alg: cipher: Test %d failed "
  2483. "on %s for %s\n", j, e, algo);
  2484. hexdump(q, template[i].len);
  2485. ret = -EINVAL;
  2486. goto out;
  2487. }
  2488. }
  2489. ret = 0;
  2490. out:
  2491. testmgr_free_buf(xbuf);
  2492. out_nobuf:
  2493. return ret;
  2494. }
/*
 * Run one skcipher test vector through one specific testvec_config: set the
 * key, prepare the IV and scatterlists per the config, do the encryption or
 * decryption, then verify that the request struct and scatterlists weren't
 * corrupted, that the error code (if any) matches the vector's expectation,
 * and that the output data (and output IV, where applicable) are correct.
 *
 * Returns 0 on pass (including an expected failure), or a negative error.
 */
static int test_skcipher_vec_cfg(int enc, const struct cipher_testvec *vec,
				 const char *vec_name,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const char *driver = crypto_skcipher_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized stack IV buffer so the config can choose its alignment. */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = do_setkey(crypto_skcipher_setkey, tfm, vec->key, vec->klen,
			cfg, alignmask);
	if (err) {
		/* A setkey failure may be the expected result. */
		if (err == vec->setkey_error)
			return 0;
		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->setkey_error) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		/* For IV-generating algorithms, decryption takes iv_out. */
		if (vec->generates_iv && !enc)
			memcpy(iv, vec->iv_out, ivsize);
		else if (vec->iv)
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
			       driver, vec_name);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != vec->len ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_skcipher_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		/* Log exactly which field(s) changed to aid debugging. */
		if (req->cryptlen != vec->len)
			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
		if (req->iv != iv)
			pr_err("alg: skcipher: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->dst'\n");
		if (crypto_skcipher_reqtfm(req) != tfm)
			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: skcipher: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: skcipher: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: skcipher: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for success or failure */
	if (err) {
		if (err == vec->crypt_error)
			return 0;
		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
		return err;
	}
	if (vec->crypt_error) {
		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    vec->len, 0, true);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}
	return 0;
}
  2649. static int test_skcipher_vec(int enc, const struct cipher_testvec *vec,
  2650. unsigned int vec_num,
  2651. struct skcipher_request *req,
  2652. struct cipher_test_sglists *tsgls)
  2653. {
  2654. char vec_name[16];
  2655. unsigned int i;
  2656. int err;
  2657. if (fips_enabled && vec->fips_skip)
  2658. return 0;
  2659. sprintf(vec_name, "%u", vec_num);
  2660. for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
  2661. err = test_skcipher_vec_cfg(enc, vec, vec_name,
  2662. &default_cipher_testvec_configs[i],
  2663. req, tsgls);
  2664. if (err)
  2665. return err;
  2666. }
  2667. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  2668. if (!noextratests) {
  2669. struct rnd_state rng;
  2670. struct testvec_config cfg;
  2671. char cfgname[TESTVEC_CONFIG_NAMELEN];
  2672. init_rnd_state(&rng);
  2673. for (i = 0; i < fuzz_iterations; i++) {
  2674. generate_random_testvec_config(&rng, &cfg, cfgname,
  2675. sizeof(cfgname));
  2676. err = test_skcipher_vec_cfg(enc, vec, vec_name,
  2677. &cfg, req, tsgls);
  2678. if (err)
  2679. return err;
  2680. cond_resched();
  2681. }
  2682. }
  2683. #endif
  2684. return 0;
  2685. }
  2686. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Generate a symmetric cipher test vector from the given implementation.
 * Assumes the buffers in 'vec' were already allocated.
 *
 * A random key, IV, and plaintext are chosen, and the ciphertext is produced
 * by encrypting with the implementation behind 'req'.  Setkey and encryption
 * errors are recorded in the vector rather than treated as failures here.
 * A human-readable description of the vector is written to 'name'.
 */
static void generate_random_cipher_testvec(struct rnd_state *rng,
					   struct skcipher_request *req,
					   struct cipher_testvec *vec,
					   unsigned int maxdatasize,
					   char *name, size_t max_namelen)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct scatterlist src, dst;
	u8 iv[MAX_IVLEN];
	DECLARE_CRYPTO_WAIT(wait);

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32_below(rng, 4) == 0)
		vec->klen = prandom_u32_below(rng, maxkeysize + 1);
	generate_random_bytes(rng, (u8 *)vec->key, vec->klen);
	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes(rng, (u8 *)vec->iv, ivsize);

	/* Plaintext */
	vec->len = generate_random_length(rng, maxdatasize);
	generate_random_bytes(rng, (u8 *)vec->ptext, vec->len);

	/* If the key couldn't be set, no need to continue to encrypt. */
	if (vec->setkey_error)
		goto done;

	/* Ciphertext */
	sg_init_one(&src, vec->ptext, vec->len);
	sg_init_one(&dst, vec->ctext, vec->len);
	/* Use a copy of the IV; the algorithm may modify it. */
	memcpy(iv, vec->iv, ivsize);
	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	if (vec->crypt_error != 0) {
		/*
		 * The only acceptable error here is for an invalid length, so
		 * skcipher decryption should fail with the same error too.
		 * We'll test for this.  But to keep the API usage well-defined,
		 * explicitly initialize the ciphertext buffer too.
		 */
		memset((u8 *)vec->ctext, 0, vec->len);
	}
done:
	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
		 vec->len, vec->klen);
}
/*
 * Test the skcipher algorithm represented by @req against the corresponding
 * generic implementation, if one is available.
 *
 * Randomly generated key/IV/plaintext vectors are produced with the generic
 * transform and then replayed through the implementation under test in both
 * directions.  Only runs when the extra (fuzz) tests are enabled.
 *
 * Returns 0 on success or if the comparison is skipped, -errno on failure.
 */
static int test_skcipher_vs_generic_impl(const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
	/* Leave room for the poison bytes appended to the test buffers. */
	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
	const char *driver = crypto_skcipher_driver_name(tfm);
	struct rnd_state rng;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_skcipher *generic_tfm = NULL;
	struct skcipher_request *generic_req = NULL;
	unsigned int i;
	struct cipher_testvec vec = { 0 };
	char vec_name[64];
	struct testvec_config *cfg;
	char cfgname[TESTVEC_CONFIG_NAMELEN];
	int err;

	if (noextratests)
		return 0;

	/* Keywrap isn't supported here yet as it handles its IV differently. */
	if (strncmp(algname, "kw(", 3) == 0)
		return 0;

	init_rnd_state(&rng);

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		/* A missing generic implementation is not a test failure. */
		if (err == -ENOENT) {
			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
	if (!cfg) {
		err = -ENOMEM;
		goto out;
	}

	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */

	if (crypto_skcipher_min_keysize(tfm) !=
	    crypto_skcipher_min_keysize(generic_tfm)) {
		pr_err("alg: skcipher: min keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_skcipher_min_keysize(tfm),
		       crypto_skcipher_min_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (maxkeysize != crypto_skcipher_max_keysize(generic_tfm)) {
		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, maxkeysize,
		       crypto_skcipher_max_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, blocksize,
		       crypto_skcipher_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
	vec.iv = kmalloc(ivsize, GFP_KERNEL);
	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_cipher_testvec(&rng, generic_req, &vec,
					       maxdatasize,
					       vec_name, sizeof(vec_name));
		generate_random_testvec_config(&rng, cfg, cfgname,
					       sizeof(cfgname));

		/* Both directions must agree with the generic transform. */
		err = test_skcipher_vec_cfg(ENCRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
		err = test_skcipher_vec_cfg(DECRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
		cond_resched();
	}
	err = 0;
out:
	kfree(cfg);
	kfree(vec.key);
	kfree(vec.iv);
	kfree(vec.ptext);
	kfree(vec.ctext);
	crypto_free_skcipher(generic_tfm);
	skcipher_request_free(generic_req);
	return err;
}
  2866. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Stub: extra (fuzz) tests are compiled out, so there is nothing to compare. */
static int test_skcipher_vs_generic_impl(const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	return 0;
}
  2873. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  2874. static int test_skcipher(int enc, const struct cipher_test_suite *suite,
  2875. struct skcipher_request *req,
  2876. struct cipher_test_sglists *tsgls)
  2877. {
  2878. unsigned int i;
  2879. int err;
  2880. for (i = 0; i < suite->count; i++) {
  2881. err = test_skcipher_vec(enc, &suite->vecs[i], i, req, tsgls);
  2882. if (err)
  2883. return err;
  2884. cond_resched();
  2885. }
  2886. return 0;
  2887. }
/*
 * Top-level test entry for an skcipher algorithm: allocate the transform and
 * the scatterlist test buffers, run the suite in both directions, then run
 * the optional comparison fuzz tests against the generic implementation.
 *
 * Returns 0 on success, -errno on allocation or test failure.
 */
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: skcipher: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_skcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	/* Report against the driver actually instantiated, not the request. */
	driver = crypto_skcipher_driver_name(tfm);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_skcipher(ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher(DECRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher_vs_generic_impl(desc->generic_driver, req, tsgls);
out:
	/* All of these tolerate NULL, so a single cleanup path suffices. */
	free_cipher_test_sglists(tsgls);
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}
/*
 * Test a synchronous (crypto_comp) compression transform.
 *
 * Compression vectors are checked by round-trip: compress the input, then
 * decompress the result and compare against the original input (the exact
 * compressed bytes are not compared).  Decompression vectors are checked
 * against their expected output bytes.
 *
 * Returns 0 on success, -errno on failure.
 */
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	/* Round-trip check for each compression vector. */
	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		/* Feed the compressed bytes back through decompression. */
		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* Known-answer check for each decompression vector. */
	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
/*
 * Test an asynchronous (crypto_acomp) compression transform.
 *
 * Like test_comp(): compression vectors are verified by round-trip
 * (compress, then decompress and compare against the original input),
 * while decompression vectors are verified against their expected output.
 * When the extra tests are enabled, each operation is also re-run with a
 * NULL destination buffer to exercise driver-allocated output.
 *
 * Returns 0 on success, -errno on failure.
 */
static int test_acomp(struct crypto_acomp *tfm,
		      const struct comp_testvec *ctemplate,
		      const struct comp_testvec *dtemplate,
		      int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
	unsigned int i;
	char *output, *decomp_out;
	int ret;
	struct scatterlist src, dst;
	struct acomp_req *req;
	struct crypto_wait wait;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_out) {
		kfree(output);
		return -ENOMEM;
	}

	/* Compression vectors: round-trip check. */
	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = ctemplate[i].inlen;
		void *input_vec;

		/* Copy the input so the transform never touches the template. */
		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		/* Decompress the result; req (and its callback) is reused. */
		ilen = req->dlen;
		dlen = COMP_BUF_SIZE;
		sg_init_one(&src, output, ilen);
		sg_init_one(&dst, decomp_out, dlen);
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, &dst, ilen, dlen);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			/*
			 * NOTE(review): this message says "compression failed"
			 * although the failing step is the round-trip
			 * decompression -- misleading but intentional-looking;
			 * confirm before changing log text.
			 */
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != ctemplate[i].inlen) {
			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(input_vec, decomp_out, req->dlen)) {
			pr_err("alg: acomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
		/*
		 * Re-run compression with a NULL dst.  NOTE(review): ilen here
		 * is the compressed length from above, so only a prefix of
		 * input_vec is compressed -- appears harmless for this
		 * "does NULL dst work" check, but confirm it is intentional.
		 */
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		acomp_request_set_params(req, &src, NULL, ilen, 0);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on NULL dst buffer test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}
#endif

		kfree(input_vec);
		acomp_request_free(req);
	}

	/* Decompression vectors: known-answer check. */
	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = dtemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != dtemplate[i].outlen) {
			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(output, dtemplate[i].output, req->dlen)) {
			pr_err("alg: acomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
		/* Re-run decompression with a NULL dst (src still points at
		 * input_vec from the sg_init_one() above). */
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, NULL, ilen, 0);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on NULL dst buffer test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}
#endif

		kfree(input_vec);
		acomp_request_free(req);
	}

	ret = 0;

out:
	kfree(decomp_out);
	kfree(output);
	return ret;
}
/*
 * Test a deterministic RNG against known-answer vectors.
 *
 * The seed passed to crypto_rng_reset() is the concatenation V || key || DT
 * taken from each vector.  Output is requested @loops times into the same
 * buffer, so only the final iteration's output is compared against the
 * expected result.
 *
 * Returns 0 on success, -errno on failure.
 */
static int test_cprng(struct crypto_rng *tfm,
		      const struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;
	u8 *seed;
	/* NOTE(review): assumes every vector's rlen <= 32 -- confirm vectors. */
	char result[32];

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
	if (!seed) {
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
		       "for %s\n", algo);
		return -ENOMEM;
	}

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		/* Build the seed as V || key || DT. */
		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
		if (err) {
			printk(KERN_ERR "alg: cprng: Failed to reset rng "
			       "for %s\n", algo);
			goto out;
		}

		/* Each iteration overwrites result with fresh output. */
		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err < 0) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d)\n", algo,
				       template[i].rlen);
				goto out;
			}
		}

		err = memcmp(result, template[i].result,
			     template[i].rlen);
		if (err) {
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
			err = -EINVAL;
			goto out;
		}
	}

out:
	kfree(seed);
	return err;
}
  3246. static int alg_test_cipher(const struct alg_test_desc *desc,
  3247. const char *driver, u32 type, u32 mask)
  3248. {
  3249. const struct cipher_test_suite *suite = &desc->suite.cipher;
  3250. struct crypto_cipher *tfm;
  3251. int err;
  3252. tfm = crypto_alloc_cipher(driver, type, mask);
  3253. if (IS_ERR(tfm)) {
  3254. printk(KERN_ERR "alg: cipher: Failed to load transform for "
  3255. "%s: %ld\n", driver, PTR_ERR(tfm));
  3256. return PTR_ERR(tfm);
  3257. }
  3258. err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
  3259. if (!err)
  3260. err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
  3261. crypto_free_cipher(tfm);
  3262. return err;
  3263. }
  3264. static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
  3265. u32 type, u32 mask)
  3266. {
  3267. struct crypto_comp *comp;
  3268. struct crypto_acomp *acomp;
  3269. int err;
  3270. u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
  3271. if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
  3272. acomp = crypto_alloc_acomp(driver, type, mask);
  3273. if (IS_ERR(acomp)) {
  3274. pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
  3275. driver, PTR_ERR(acomp));
  3276. return PTR_ERR(acomp);
  3277. }
  3278. err = test_acomp(acomp, desc->suite.comp.comp.vecs,
  3279. desc->suite.comp.decomp.vecs,
  3280. desc->suite.comp.comp.count,
  3281. desc->suite.comp.decomp.count);
  3282. crypto_free_acomp(acomp);
  3283. } else {
  3284. comp = crypto_alloc_comp(driver, type, mask);
  3285. if (IS_ERR(comp)) {
  3286. pr_err("alg: comp: Failed to load transform for %s: %ld\n",
  3287. driver, PTR_ERR(comp));
  3288. return PTR_ERR(comp);
  3289. }
  3290. err = test_comp(comp, desc->suite.comp.comp.vecs,
  3291. desc->suite.comp.decomp.vecs,
  3292. desc->suite.comp.comp.count,
  3293. desc->suite.comp.decomp.count);
  3294. crypto_free_comp(comp);
  3295. }
  3296. return err;
  3297. }
/*
 * Test entry for crc32c: run the normal hash tests, then additionally check
 * the shash descriptor-context convention by seeding the 32-bit state
 * directly and verifying that final() emits its bitwise complement in
 * little-endian byte order.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * ahash API, not the shash API, so the remaining part
			 * of the test is not applicable to it.
			 */
			return 0;
		}
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	driver = crypto_shash_driver_name(tfm);

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;

		/* Seed the descriptor state with an arbitrary value. */
		*ctx = 420553207;
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* final() must return ~state as a little-endian u32. */
		if (val != cpu_to_le32(~420553207)) {
			pr_err("alg: crc32c: Test failed for %s: %u\n",
			       driver, le32_to_cpu(val));
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);
	return err;
}
  3342. static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
  3343. u32 type, u32 mask)
  3344. {
  3345. struct crypto_rng *rng;
  3346. int err;
  3347. rng = crypto_alloc_rng(driver, type, mask);
  3348. if (IS_ERR(rng)) {
  3349. printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
  3350. "%ld\n", driver, PTR_ERR(rng));
  3351. return PTR_ERR(rng);
  3352. }
  3353. err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
  3354. crypto_free_rng(rng);
  3355. return err;
  3356. }
/*
 * Run one CAVS-style DRBG known-answer vector: instantiate with test
 * entropy and personalization string, generate twice with additional input
 * (re-seeding with fresh entropy before each generate when prediction
 * resistance @pr is requested), and compare the second generate's output
 * against the expected bytes.
 *
 * Returns 0 on match, non-zero on mismatch or -errno on setup failure.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		/* NOTE(review): returns -ENOMEM rather than PTR_ERR(drng);
		 * the caller only checks for non-zero, so this only loses
		 * the precise error code -- confirm before changing. */
		kfree_sensitive(buf);
		return -ENOMEM;
	}

	/* Instantiate with the vector's entropy and personalization string. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call (output discarded by the second call below). */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call; this output is compared to the vector. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	/* buf held DRBG output; scrub it before freeing. */
	kfree_sensitive(buf);
	return ret;
}
  3416. static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
  3417. u32 type, u32 mask)
  3418. {
  3419. int err = 0;
  3420. int pr = 0;
  3421. int i = 0;
  3422. const struct drbg_testvec *template = desc->suite.drbg.vecs;
  3423. unsigned int tcount = desc->suite.drbg.count;
  3424. if (0 == memcmp(driver, "drbg_pr_", 8))
  3425. pr = 1;
  3426. for (i = 0; i < tcount; i++) {
  3427. err = drbg_cavs_test(&template[i], pr, driver, type, mask);
  3428. if (err) {
  3429. printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
  3430. i, driver);
  3431. err = -EINVAL;
  3432. break;
  3433. }
  3434. }
  3435. return err;
  3436. }
/*
 * Run one key-agreement (KPP) test vector.
 *
 * Flow: set party A's secret, generate A's public key (verified against the
 * vector unless @vec->genkey asks us to save it instead), then compute the
 * shared secret from party B's public key.  For genkey vectors the shared
 * secret is cross-checked by also computing it from party B's side using
 * A's saved public key; otherwise it is compared to the expected bytes.
 *
 * Returns 0 on success, -errno on failure.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;
	void *a_ss = NULL;
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* Both parties must have derived the same secret. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
  3563. static int test_kpp(struct crypto_kpp *tfm, const char *alg,
  3564. const struct kpp_testvec *vecs, unsigned int tcount)
  3565. {
  3566. int ret, i;
  3567. for (i = 0; i < tcount; i++) {
  3568. ret = do_test_kpp(tfm, vecs++, alg);
  3569. if (ret) {
  3570. pr_err("alg: %s: test failed on vector %d, err=%d\n",
  3571. alg, i + 1, ret);
  3572. return ret;
  3573. }
  3574. }
  3575. return 0;
  3576. }
  3577. static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
  3578. u32 type, u32 mask)
  3579. {
  3580. struct crypto_kpp *tfm;
  3581. int err = 0;
  3582. tfm = crypto_alloc_kpp(driver, type, mask);
  3583. if (IS_ERR(tfm)) {
  3584. pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
  3585. driver, PTR_ERR(tfm));
  3586. return PTR_ERR(tfm);
  3587. }
  3588. if (desc->suite.kpp.vecs)
  3589. err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
  3590. desc->suite.kpp.count);
  3591. crypto_free_kpp(tfm);
  3592. return err;
  3593. }
  3594. static u8 *test_pack_u32(u8 *dst, u32 val)
  3595. {
  3596. memcpy(dst, &val, sizeof(val));
  3597. return dst + sizeof(val);
  3598. }
/*
 * Run one akcipher test vector.
 *
 * The key blob handed to setkey is vecs->key followed by two packed u32s
 * (algo id and param length) and the parameter bytes.  For encryption
 * vectors the flow is encrypt-then-decrypt; for signature vectors the
 * meanings of m/c are swapped so the same code path does verify-then-sign.
 * The source is deliberately split across scatterlist entries to exercise
 * multi-entry SG handling.
 *
 * Returns 0 on success, -errno on failure.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[3];
	const char *m, *c;
	unsigned int m_size, c_size;
	const char *op;
	u8 *key, *ptr;

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	/* Build the key blob: key || algo (u32) || param_len (u32) || params. */
	key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
		      GFP_KERNEL);
	if (!key)
		goto free_req;
	memcpy(key, vecs->key, vecs->key_len);
	ptr = key + vecs->key_len;
	ptr = test_pack_u32(ptr, vecs->algo);
	ptr = test_pack_u32(ptr, vecs->param_len);
	memcpy(ptr, vecs->params, vecs->param_len);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
	if (err)
		goto free_key;

	/*
	 * First run test which do not require a private key, such as
	 * encrypt or verify.
	 */
	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_key;

	if (!vecs->siggen_sigver_test) {
		m = vecs->m;
		m_size = vecs->m_size;
		c = vecs->c;
		c_size = vecs->c_size;
		op = "encrypt";
	} else {
		/* Swap args so we could keep plaintext (digest)
		 * in vecs->m, and cooked signature in vecs->c.
		 */
		m = vecs->c; /* signature */
		m_size = vecs->c_size;
		c = vecs->m; /* digest */
		c_size = vecs->m_size;
		op = "verify";
	}

	err = -E2BIG;
	if (WARN_ON(m_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], m, m_size);

	/*
	 * Split the source over two SG entries at byte 8 to exercise
	 * multi-entry scatterlist handling.
	 * NOTE(review): this assumes m_size >= 8 for every vector -- confirm.
	 */
	sg_init_table(src_tab, 3);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
	if (vecs->siggen_sigver_test) {
		if (WARN_ON(c_size > PAGE_SIZE))
			goto free_all;
		memcpy(xbuf[1], c, c_size);
		sg_set_buf(&src_tab[2], xbuf[1], c_size);
		akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
	} else {
		sg_init_one(&dst, outbuf_enc, out_len_max);
		akcipher_request_set_crypt(req, src_tab, &dst, m_size,
					   out_len_max);
	}
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	if (!vecs->siggen_sigver_test && c) {
		if (req->dst_len != c_size) {
			pr_err("alg: akcipher: %s test failed. Invalid output len\n",
			       op);
			err = -EINVAL;
			goto free_all;
		}
		/* verify that encrypted message is equal to expected */
		if (memcmp(c, outbuf_enc, c_size) != 0) {
			pr_err("alg: akcipher: %s test failed. Invalid output\n",
			       op);
			hexdump(outbuf_enc, c_size);
			err = -EINVAL;
			goto free_all;
		}
	}

	/*
	 * Don't invoke (decrypt or sign) test which require a private key
	 * for vectors with only a public key.
	 */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}
	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	/* With no expected ciphertext, decrypt what we just encrypted. */
	if (!vecs->siggen_sigver_test && !c) {
		c = outbuf_enc;
		c_size = req->dst_len;
	}

	err = -E2BIG;
	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
	if (WARN_ON(c_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], c, c_size);

	sg_init_one(&src, xbuf[0], c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < m_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
		       op, out_len);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that decrypted message is equal to the original msg */
	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_key:
	kfree(key);
free_req:
	akcipher_request_free(req);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}
  3765. static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
  3766. const struct akcipher_testvec *vecs,
  3767. unsigned int tcount)
  3768. {
  3769. const char *algo =
  3770. crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
  3771. int ret, i;
  3772. for (i = 0; i < tcount; i++) {
  3773. ret = test_akcipher_one(tfm, vecs++);
  3774. if (!ret)
  3775. continue;
  3776. pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
  3777. i + 1, algo, ret);
  3778. return ret;
  3779. }
  3780. return 0;
  3781. }
  3782. static int alg_test_akcipher(const struct alg_test_desc *desc,
  3783. const char *driver, u32 type, u32 mask)
  3784. {
  3785. struct crypto_akcipher *tfm;
  3786. int err = 0;
  3787. tfm = crypto_alloc_akcipher(driver, type, mask);
  3788. if (IS_ERR(tfm)) {
  3789. pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
  3790. driver, PTR_ERR(tfm));
  3791. return PTR_ERR(tfm);
  3792. }
  3793. if (desc->suite.akcipher.vecs)
  3794. err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
  3795. desc->suite.akcipher.count);
  3796. crypto_free_akcipher(tfm);
  3797. return err;
  3798. }
/*
 * No-op self-test used for algorithms that are deliberately not tested
 * here (e.g. the paes/psm4 hardware-keyed variants and DRBG flavours
 * covered by an equivalent sibling test); always reports success.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}
/*
 * Designated-initializer helpers for the test-suite tables below:
 * ____VECS() expands to the .vecs/.count member pair for an array of
 * test vectors; __VECS() wraps that pair in braces for use as a full
 * struct initializer (e.g. .cipher = __VECS(aes_cbc_tv_template)).
 */
#define ____VECS(tv) .vecs = tv, .count = ARRAY_SIZE(tv)
#define __VECS(tv) { ____VECS(tv) }
  3806. /* Please keep this list sorted by algorithm name. */
  3807. static const struct alg_test_desc alg_test_descs[] = {
  3808. {
  3809. .alg = "adiantum(xchacha12,aes)",
  3810. .generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
  3811. .test = alg_test_skcipher,
  3812. .suite = {
  3813. .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
  3814. },
  3815. }, {
  3816. .alg = "adiantum(xchacha20,aes)",
  3817. .generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
  3818. .test = alg_test_skcipher,
  3819. .suite = {
  3820. .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
  3821. },
  3822. }, {
  3823. .alg = "aegis128",
  3824. .test = alg_test_aead,
  3825. .suite = {
  3826. .aead = __VECS(aegis128_tv_template)
  3827. }
  3828. }, {
  3829. .alg = "ansi_cprng",
  3830. .test = alg_test_cprng,
  3831. .suite = {
  3832. .cprng = __VECS(ansi_cprng_aes_tv_template)
  3833. }
  3834. }, {
  3835. .alg = "authenc(hmac(md5),ecb(cipher_null))",
  3836. .test = alg_test_aead,
  3837. .suite = {
  3838. .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
  3839. }
  3840. }, {
  3841. .alg = "authenc(hmac(sha1),cbc(aes))",
  3842. .test = alg_test_aead,
  3843. .fips_allowed = 1,
  3844. .suite = {
  3845. .aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
  3846. }
  3847. }, {
  3848. .alg = "authenc(hmac(sha1),cbc(des))",
  3849. .test = alg_test_aead,
  3850. .suite = {
  3851. .aead = __VECS(hmac_sha1_des_cbc_tv_temp)
  3852. }
  3853. }, {
  3854. .alg = "authenc(hmac(sha1),cbc(des3_ede))",
  3855. .test = alg_test_aead,
  3856. .suite = {
  3857. .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
  3858. }
  3859. }, {
  3860. .alg = "authenc(hmac(sha1),ctr(aes))",
  3861. .test = alg_test_null,
  3862. .fips_allowed = 1,
  3863. }, {
  3864. .alg = "authenc(hmac(sha1),ecb(cipher_null))",
  3865. .test = alg_test_aead,
  3866. .suite = {
  3867. .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
  3868. }
  3869. }, {
  3870. .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
  3871. .test = alg_test_null,
  3872. .fips_allowed = 1,
  3873. }, {
  3874. .alg = "authenc(hmac(sha224),cbc(des))",
  3875. .test = alg_test_aead,
  3876. .suite = {
  3877. .aead = __VECS(hmac_sha224_des_cbc_tv_temp)
  3878. }
  3879. }, {
  3880. .alg = "authenc(hmac(sha224),cbc(des3_ede))",
  3881. .test = alg_test_aead,
  3882. .suite = {
  3883. .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
  3884. }
  3885. }, {
  3886. .alg = "authenc(hmac(sha256),cbc(aes))",
  3887. .test = alg_test_aead,
  3888. .fips_allowed = 1,
  3889. .suite = {
  3890. .aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
  3891. }
  3892. }, {
  3893. .alg = "authenc(hmac(sha256),cbc(des))",
  3894. .test = alg_test_aead,
  3895. .suite = {
  3896. .aead = __VECS(hmac_sha256_des_cbc_tv_temp)
  3897. }
  3898. }, {
  3899. .alg = "authenc(hmac(sha256),cbc(des3_ede))",
  3900. .test = alg_test_aead,
  3901. .suite = {
  3902. .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
  3903. }
  3904. }, {
  3905. .alg = "authenc(hmac(sha256),ctr(aes))",
  3906. .test = alg_test_null,
  3907. .fips_allowed = 1,
  3908. }, {
  3909. .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
  3910. .test = alg_test_null,
  3911. .fips_allowed = 1,
  3912. }, {
  3913. .alg = "authenc(hmac(sha384),cbc(des))",
  3914. .test = alg_test_aead,
  3915. .suite = {
  3916. .aead = __VECS(hmac_sha384_des_cbc_tv_temp)
  3917. }
  3918. }, {
  3919. .alg = "authenc(hmac(sha384),cbc(des3_ede))",
  3920. .test = alg_test_aead,
  3921. .suite = {
  3922. .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
  3923. }
  3924. }, {
  3925. .alg = "authenc(hmac(sha384),ctr(aes))",
  3926. .test = alg_test_null,
  3927. .fips_allowed = 1,
  3928. }, {
  3929. .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
  3930. .test = alg_test_null,
  3931. .fips_allowed = 1,
  3932. }, {
  3933. .alg = "authenc(hmac(sha512),cbc(aes))",
  3934. .fips_allowed = 1,
  3935. .test = alg_test_aead,
  3936. .suite = {
  3937. .aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
  3938. }
  3939. }, {
  3940. .alg = "authenc(hmac(sha512),cbc(des))",
  3941. .test = alg_test_aead,
  3942. .suite = {
  3943. .aead = __VECS(hmac_sha512_des_cbc_tv_temp)
  3944. }
  3945. }, {
  3946. .alg = "authenc(hmac(sha512),cbc(des3_ede))",
  3947. .test = alg_test_aead,
  3948. .suite = {
  3949. .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
  3950. }
  3951. }, {
  3952. .alg = "authenc(hmac(sha512),ctr(aes))",
  3953. .test = alg_test_null,
  3954. .fips_allowed = 1,
  3955. }, {
  3956. .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
  3957. .test = alg_test_null,
  3958. .fips_allowed = 1,
  3959. }, {
  3960. .alg = "blake2b-160",
  3961. .test = alg_test_hash,
  3962. .fips_allowed = 0,
  3963. .suite = {
  3964. .hash = __VECS(blake2b_160_tv_template)
  3965. }
  3966. }, {
  3967. .alg = "blake2b-256",
  3968. .test = alg_test_hash,
  3969. .fips_allowed = 0,
  3970. .suite = {
  3971. .hash = __VECS(blake2b_256_tv_template)
  3972. }
  3973. }, {
  3974. .alg = "blake2b-384",
  3975. .test = alg_test_hash,
  3976. .fips_allowed = 0,
  3977. .suite = {
  3978. .hash = __VECS(blake2b_384_tv_template)
  3979. }
  3980. }, {
  3981. .alg = "blake2b-512",
  3982. .test = alg_test_hash,
  3983. .fips_allowed = 0,
  3984. .suite = {
  3985. .hash = __VECS(blake2b_512_tv_template)
  3986. }
  3987. }, {
  3988. .alg = "cbc(aes)",
  3989. .test = alg_test_skcipher,
  3990. .fips_allowed = 1,
  3991. .suite = {
  3992. .cipher = __VECS(aes_cbc_tv_template)
  3993. },
  3994. }, {
  3995. .alg = "cbc(anubis)",
  3996. .test = alg_test_skcipher,
  3997. .suite = {
  3998. .cipher = __VECS(anubis_cbc_tv_template)
  3999. },
  4000. }, {
  4001. .alg = "cbc(aria)",
  4002. .test = alg_test_skcipher,
  4003. .suite = {
  4004. .cipher = __VECS(aria_cbc_tv_template)
  4005. },
  4006. }, {
  4007. .alg = "cbc(blowfish)",
  4008. .test = alg_test_skcipher,
  4009. .suite = {
  4010. .cipher = __VECS(bf_cbc_tv_template)
  4011. },
  4012. }, {
  4013. .alg = "cbc(camellia)",
  4014. .test = alg_test_skcipher,
  4015. .suite = {
  4016. .cipher = __VECS(camellia_cbc_tv_template)
  4017. },
  4018. }, {
  4019. .alg = "cbc(cast5)",
  4020. .test = alg_test_skcipher,
  4021. .suite = {
  4022. .cipher = __VECS(cast5_cbc_tv_template)
  4023. },
  4024. }, {
  4025. .alg = "cbc(cast6)",
  4026. .test = alg_test_skcipher,
  4027. .suite = {
  4028. .cipher = __VECS(cast6_cbc_tv_template)
  4029. },
  4030. }, {
  4031. .alg = "cbc(des)",
  4032. .test = alg_test_skcipher,
  4033. .suite = {
  4034. .cipher = __VECS(des_cbc_tv_template)
  4035. },
  4036. }, {
  4037. .alg = "cbc(des3_ede)",
  4038. .test = alg_test_skcipher,
  4039. .suite = {
  4040. .cipher = __VECS(des3_ede_cbc_tv_template)
  4041. },
  4042. }, {
  4043. /* Same as cbc(aes) except the key is stored in
  4044. * hardware secure memory which we reference by index
  4045. */
  4046. .alg = "cbc(paes)",
  4047. .test = alg_test_null,
  4048. .fips_allowed = 1,
  4049. }, {
  4050. /* Same as cbc(sm4) except the key is stored in
  4051. * hardware secure memory which we reference by index
  4052. */
  4053. .alg = "cbc(psm4)",
  4054. .test = alg_test_null,
  4055. }, {
  4056. .alg = "cbc(serpent)",
  4057. .test = alg_test_skcipher,
  4058. .suite = {
  4059. .cipher = __VECS(serpent_cbc_tv_template)
  4060. },
  4061. }, {
  4062. .alg = "cbc(sm4)",
  4063. .test = alg_test_skcipher,
  4064. .suite = {
  4065. .cipher = __VECS(sm4_cbc_tv_template)
  4066. }
  4067. }, {
  4068. .alg = "cbc(twofish)",
  4069. .test = alg_test_skcipher,
  4070. .suite = {
  4071. .cipher = __VECS(tf_cbc_tv_template)
  4072. },
  4073. }, {
  4074. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4075. .alg = "cbc-paes-s390",
  4076. .fips_allowed = 1,
  4077. .test = alg_test_skcipher,
  4078. .suite = {
  4079. .cipher = __VECS(aes_cbc_tv_template)
  4080. }
  4081. }, {
  4082. #endif
  4083. .alg = "cbcmac(aes)",
  4084. .fips_allowed = 1,
  4085. .test = alg_test_hash,
  4086. .suite = {
  4087. .hash = __VECS(aes_cbcmac_tv_template)
  4088. }
  4089. }, {
  4090. .alg = "cbcmac(sm4)",
  4091. .test = alg_test_hash,
  4092. .suite = {
  4093. .hash = __VECS(sm4_cbcmac_tv_template)
  4094. }
  4095. }, {
  4096. .alg = "ccm(aes)",
  4097. .generic_driver = "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
  4098. .test = alg_test_aead,
  4099. .fips_allowed = 1,
  4100. .suite = {
  4101. .aead = {
  4102. ____VECS(aes_ccm_tv_template),
  4103. .einval_allowed = 1,
  4104. }
  4105. }
  4106. }, {
  4107. .alg = "ccm(sm4)",
  4108. .generic_driver = "ccm_base(ctr(sm4-generic),cbcmac(sm4-generic))",
  4109. .test = alg_test_aead,
  4110. .suite = {
  4111. .aead = {
  4112. ____VECS(sm4_ccm_tv_template),
  4113. .einval_allowed = 1,
  4114. }
  4115. }
  4116. }, {
  4117. .alg = "cfb(aes)",
  4118. .test = alg_test_skcipher,
  4119. .fips_allowed = 1,
  4120. .suite = {
  4121. .cipher = __VECS(aes_cfb_tv_template)
  4122. },
  4123. }, {
  4124. .alg = "cfb(aria)",
  4125. .test = alg_test_skcipher,
  4126. .suite = {
  4127. .cipher = __VECS(aria_cfb_tv_template)
  4128. },
  4129. }, {
  4130. .alg = "cfb(sm4)",
  4131. .test = alg_test_skcipher,
  4132. .suite = {
  4133. .cipher = __VECS(sm4_cfb_tv_template)
  4134. }
  4135. }, {
  4136. .alg = "chacha20",
  4137. .test = alg_test_skcipher,
  4138. .suite = {
  4139. .cipher = __VECS(chacha20_tv_template)
  4140. },
  4141. }, {
  4142. .alg = "cmac(aes)",
  4143. .fips_allowed = 1,
  4144. .test = alg_test_hash,
  4145. .suite = {
  4146. .hash = __VECS(aes_cmac128_tv_template)
  4147. }
  4148. }, {
  4149. .alg = "cmac(des3_ede)",
  4150. .test = alg_test_hash,
  4151. .suite = {
  4152. .hash = __VECS(des3_ede_cmac64_tv_template)
  4153. }
  4154. }, {
  4155. .alg = "cmac(sm4)",
  4156. .test = alg_test_hash,
  4157. .suite = {
  4158. .hash = __VECS(sm4_cmac128_tv_template)
  4159. }
  4160. }, {
  4161. .alg = "compress_null",
  4162. .test = alg_test_null,
  4163. }, {
  4164. .alg = "crc32",
  4165. .test = alg_test_hash,
  4166. .fips_allowed = 1,
  4167. .suite = {
  4168. .hash = __VECS(crc32_tv_template)
  4169. }
  4170. }, {
  4171. .alg = "crc32c",
  4172. .test = alg_test_crc32c,
  4173. .fips_allowed = 1,
  4174. .suite = {
  4175. .hash = __VECS(crc32c_tv_template)
  4176. }
  4177. }, {
  4178. .alg = "crc64-rocksoft",
  4179. .test = alg_test_hash,
  4180. .fips_allowed = 1,
  4181. .suite = {
  4182. .hash = __VECS(crc64_rocksoft_tv_template)
  4183. }
  4184. }, {
  4185. .alg = "crct10dif",
  4186. .test = alg_test_hash,
  4187. .fips_allowed = 1,
  4188. .suite = {
  4189. .hash = __VECS(crct10dif_tv_template)
  4190. }
  4191. }, {
  4192. .alg = "ctr(aes)",
  4193. .test = alg_test_skcipher,
  4194. .fips_allowed = 1,
  4195. .suite = {
  4196. .cipher = __VECS(aes_ctr_tv_template)
  4197. }
  4198. }, {
  4199. .alg = "ctr(aria)",
  4200. .test = alg_test_skcipher,
  4201. .suite = {
  4202. .cipher = __VECS(aria_ctr_tv_template)
  4203. }
  4204. }, {
  4205. .alg = "ctr(blowfish)",
  4206. .test = alg_test_skcipher,
  4207. .suite = {
  4208. .cipher = __VECS(bf_ctr_tv_template)
  4209. }
  4210. }, {
  4211. .alg = "ctr(camellia)",
  4212. .test = alg_test_skcipher,
  4213. .suite = {
  4214. .cipher = __VECS(camellia_ctr_tv_template)
  4215. }
  4216. }, {
  4217. .alg = "ctr(cast5)",
  4218. .test = alg_test_skcipher,
  4219. .suite = {
  4220. .cipher = __VECS(cast5_ctr_tv_template)
  4221. }
  4222. }, {
  4223. .alg = "ctr(cast6)",
  4224. .test = alg_test_skcipher,
  4225. .suite = {
  4226. .cipher = __VECS(cast6_ctr_tv_template)
  4227. }
  4228. }, {
  4229. .alg = "ctr(des)",
  4230. .test = alg_test_skcipher,
  4231. .suite = {
  4232. .cipher = __VECS(des_ctr_tv_template)
  4233. }
  4234. }, {
  4235. .alg = "ctr(des3_ede)",
  4236. .test = alg_test_skcipher,
  4237. .suite = {
  4238. .cipher = __VECS(des3_ede_ctr_tv_template)
  4239. }
  4240. }, {
  4241. /* Same as ctr(aes) except the key is stored in
  4242. * hardware secure memory which we reference by index
  4243. */
  4244. .alg = "ctr(paes)",
  4245. .test = alg_test_null,
  4246. .fips_allowed = 1,
  4247. }, {
  4248. /* Same as ctr(sm4) except the key is stored in
  4249. * hardware secure memory which we reference by index
  4250. */
  4251. .alg = "ctr(psm4)",
  4252. .test = alg_test_null,
  4253. }, {
  4254. .alg = "ctr(serpent)",
  4255. .test = alg_test_skcipher,
  4256. .suite = {
  4257. .cipher = __VECS(serpent_ctr_tv_template)
  4258. }
  4259. }, {
  4260. .alg = "ctr(sm4)",
  4261. .test = alg_test_skcipher,
  4262. .suite = {
  4263. .cipher = __VECS(sm4_ctr_tv_template)
  4264. }
  4265. }, {
  4266. .alg = "ctr(twofish)",
  4267. .test = alg_test_skcipher,
  4268. .suite = {
  4269. .cipher = __VECS(tf_ctr_tv_template)
  4270. }
  4271. }, {
  4272. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4273. .alg = "ctr-paes-s390",
  4274. .fips_allowed = 1,
  4275. .test = alg_test_skcipher,
  4276. .suite = {
  4277. .cipher = __VECS(aes_ctr_tv_template)
  4278. }
  4279. }, {
  4280. #endif
  4281. .alg = "cts(cbc(aes))",
  4282. .test = alg_test_skcipher,
  4283. .fips_allowed = 1,
  4284. .suite = {
  4285. .cipher = __VECS(cts_mode_tv_template)
  4286. }
  4287. }, {
  4288. /* Same as cts(cbc((aes)) except the key is stored in
  4289. * hardware secure memory which we reference by index
  4290. */
  4291. .alg = "cts(cbc(paes))",
  4292. .test = alg_test_null,
  4293. .fips_allowed = 1,
  4294. }, {
  4295. .alg = "curve25519",
  4296. .test = alg_test_kpp,
  4297. .suite = {
  4298. .kpp = __VECS(curve25519_tv_template)
  4299. }
  4300. }, {
  4301. .alg = "deflate",
  4302. .test = alg_test_comp,
  4303. .fips_allowed = 1,
  4304. .suite = {
  4305. .comp = {
  4306. .comp = __VECS(deflate_comp_tv_template),
  4307. .decomp = __VECS(deflate_decomp_tv_template)
  4308. }
  4309. }
  4310. }, {
  4311. .alg = "dh",
  4312. .test = alg_test_kpp,
  4313. .suite = {
  4314. .kpp = __VECS(dh_tv_template)
  4315. }
  4316. }, {
  4317. .alg = "digest_null",
  4318. .test = alg_test_null,
  4319. }, {
  4320. .alg = "drbg_nopr_ctr_aes128",
  4321. .test = alg_test_drbg,
  4322. .fips_allowed = 1,
  4323. .suite = {
  4324. .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
  4325. }
  4326. }, {
  4327. .alg = "drbg_nopr_ctr_aes192",
  4328. .test = alg_test_drbg,
  4329. .fips_allowed = 1,
  4330. .suite = {
  4331. .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
  4332. }
  4333. }, {
  4334. .alg = "drbg_nopr_ctr_aes256",
  4335. .test = alg_test_drbg,
  4336. .fips_allowed = 1,
  4337. .suite = {
  4338. .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
  4339. }
  4340. }, {
  4341. /*
  4342. * There is no need to specifically test the DRBG with every
  4343. * backend cipher -- covered by drbg_nopr_hmac_sha256 test
  4344. */
  4345. .alg = "drbg_nopr_hmac_sha1",
  4346. .fips_allowed = 1,
  4347. .test = alg_test_null,
  4348. }, {
  4349. .alg = "drbg_nopr_hmac_sha256",
  4350. .test = alg_test_drbg,
  4351. .fips_allowed = 1,
  4352. .suite = {
  4353. .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
  4354. }
  4355. }, {
  4356. /* covered by drbg_nopr_hmac_sha256 test */
  4357. .alg = "drbg_nopr_hmac_sha384",
  4358. .fips_allowed = 1,
  4359. .test = alg_test_null,
  4360. }, {
  4361. .alg = "drbg_nopr_hmac_sha512",
  4362. .test = alg_test_drbg,
  4363. .fips_allowed = 1,
  4364. .suite = {
  4365. .drbg = __VECS(drbg_nopr_hmac_sha512_tv_template)
  4366. }
  4367. }, {
  4368. .alg = "drbg_nopr_sha1",
  4369. .fips_allowed = 1,
  4370. .test = alg_test_null,
  4371. }, {
  4372. .alg = "drbg_nopr_sha256",
  4373. .test = alg_test_drbg,
  4374. .fips_allowed = 1,
  4375. .suite = {
  4376. .drbg = __VECS(drbg_nopr_sha256_tv_template)
  4377. }
  4378. }, {
  4379. /* covered by drbg_nopr_sha256 test */
  4380. .alg = "drbg_nopr_sha384",
  4381. .fips_allowed = 1,
  4382. .test = alg_test_null,
  4383. }, {
  4384. .alg = "drbg_nopr_sha512",
  4385. .fips_allowed = 1,
  4386. .test = alg_test_null,
  4387. }, {
  4388. .alg = "drbg_pr_ctr_aes128",
  4389. .test = alg_test_drbg,
  4390. .fips_allowed = 1,
  4391. .suite = {
  4392. .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
  4393. }
  4394. }, {
  4395. /* covered by drbg_pr_ctr_aes128 test */
  4396. .alg = "drbg_pr_ctr_aes192",
  4397. .fips_allowed = 1,
  4398. .test = alg_test_null,
  4399. }, {
  4400. .alg = "drbg_pr_ctr_aes256",
  4401. .fips_allowed = 1,
  4402. .test = alg_test_null,
  4403. }, {
  4404. .alg = "drbg_pr_hmac_sha1",
  4405. .fips_allowed = 1,
  4406. .test = alg_test_null,
  4407. }, {
  4408. .alg = "drbg_pr_hmac_sha256",
  4409. .test = alg_test_drbg,
  4410. .fips_allowed = 1,
  4411. .suite = {
  4412. .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
  4413. }
  4414. }, {
  4415. /* covered by drbg_pr_hmac_sha256 test */
  4416. .alg = "drbg_pr_hmac_sha384",
  4417. .fips_allowed = 1,
  4418. .test = alg_test_null,
  4419. }, {
  4420. .alg = "drbg_pr_hmac_sha512",
  4421. .test = alg_test_null,
  4422. .fips_allowed = 1,
  4423. }, {
  4424. .alg = "drbg_pr_sha1",
  4425. .fips_allowed = 1,
  4426. .test = alg_test_null,
  4427. }, {
  4428. .alg = "drbg_pr_sha256",
  4429. .test = alg_test_drbg,
  4430. .fips_allowed = 1,
  4431. .suite = {
  4432. .drbg = __VECS(drbg_pr_sha256_tv_template)
  4433. }
  4434. }, {
  4435. /* covered by drbg_pr_sha256 test */
  4436. .alg = "drbg_pr_sha384",
  4437. .fips_allowed = 1,
  4438. .test = alg_test_null,
  4439. }, {
  4440. .alg = "drbg_pr_sha512",
  4441. .fips_allowed = 1,
  4442. .test = alg_test_null,
  4443. }, {
  4444. .alg = "ecb(aes)",
  4445. .test = alg_test_skcipher,
  4446. .fips_allowed = 1,
  4447. .suite = {
  4448. .cipher = __VECS(aes_tv_template)
  4449. }
  4450. }, {
  4451. .alg = "ecb(anubis)",
  4452. .test = alg_test_skcipher,
  4453. .suite = {
  4454. .cipher = __VECS(anubis_tv_template)
  4455. }
  4456. }, {
  4457. .alg = "ecb(arc4)",
  4458. .generic_driver = "ecb(arc4)-generic",
  4459. .test = alg_test_skcipher,
  4460. .suite = {
  4461. .cipher = __VECS(arc4_tv_template)
  4462. }
  4463. }, {
  4464. .alg = "ecb(aria)",
  4465. .test = alg_test_skcipher,
  4466. .suite = {
  4467. .cipher = __VECS(aria_tv_template)
  4468. }
  4469. }, {
  4470. .alg = "ecb(blowfish)",
  4471. .test = alg_test_skcipher,
  4472. .suite = {
  4473. .cipher = __VECS(bf_tv_template)
  4474. }
  4475. }, {
  4476. .alg = "ecb(camellia)",
  4477. .test = alg_test_skcipher,
  4478. .suite = {
  4479. .cipher = __VECS(camellia_tv_template)
  4480. }
  4481. }, {
  4482. .alg = "ecb(cast5)",
  4483. .test = alg_test_skcipher,
  4484. .suite = {
  4485. .cipher = __VECS(cast5_tv_template)
  4486. }
  4487. }, {
  4488. .alg = "ecb(cast6)",
  4489. .test = alg_test_skcipher,
  4490. .suite = {
  4491. .cipher = __VECS(cast6_tv_template)
  4492. }
  4493. }, {
  4494. .alg = "ecb(cipher_null)",
  4495. .test = alg_test_null,
  4496. .fips_allowed = 1,
  4497. }, {
  4498. .alg = "ecb(des)",
  4499. .test = alg_test_skcipher,
  4500. .suite = {
  4501. .cipher = __VECS(des_tv_template)
  4502. }
  4503. }, {
  4504. .alg = "ecb(des3_ede)",
  4505. .test = alg_test_skcipher,
  4506. .suite = {
  4507. .cipher = __VECS(des3_ede_tv_template)
  4508. }
  4509. }, {
  4510. .alg = "ecb(fcrypt)",
  4511. .test = alg_test_skcipher,
  4512. .suite = {
  4513. .cipher = {
  4514. .vecs = fcrypt_pcbc_tv_template,
  4515. .count = 1
  4516. }
  4517. }
  4518. }, {
  4519. .alg = "ecb(khazad)",
  4520. .test = alg_test_skcipher,
  4521. .suite = {
  4522. .cipher = __VECS(khazad_tv_template)
  4523. }
  4524. }, {
  4525. /* Same as ecb(aes) except the key is stored in
  4526. * hardware secure memory which we reference by index
  4527. */
  4528. .alg = "ecb(paes)",
  4529. .test = alg_test_null,
  4530. .fips_allowed = 1,
  4531. }, {
  4532. .alg = "ecb(seed)",
  4533. .test = alg_test_skcipher,
  4534. .suite = {
  4535. .cipher = __VECS(seed_tv_template)
  4536. }
  4537. }, {
  4538. .alg = "ecb(serpent)",
  4539. .test = alg_test_skcipher,
  4540. .suite = {
  4541. .cipher = __VECS(serpent_tv_template)
  4542. }
  4543. }, {
  4544. .alg = "ecb(sm4)",
  4545. .test = alg_test_skcipher,
  4546. .suite = {
  4547. .cipher = __VECS(sm4_tv_template)
  4548. }
  4549. }, {
  4550. .alg = "ecb(tea)",
  4551. .test = alg_test_skcipher,
  4552. .suite = {
  4553. .cipher = __VECS(tea_tv_template)
  4554. }
  4555. }, {
  4556. .alg = "ecb(twofish)",
  4557. .test = alg_test_skcipher,
  4558. .suite = {
  4559. .cipher = __VECS(tf_tv_template)
  4560. }
  4561. }, {
  4562. .alg = "ecb(xeta)",
  4563. .test = alg_test_skcipher,
  4564. .suite = {
  4565. .cipher = __VECS(xeta_tv_template)
  4566. }
  4567. }, {
  4568. .alg = "ecb(xtea)",
  4569. .test = alg_test_skcipher,
  4570. .suite = {
  4571. .cipher = __VECS(xtea_tv_template)
  4572. }
  4573. }, {
  4574. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4575. .alg = "ecb-paes-s390",
  4576. .fips_allowed = 1,
  4577. .test = alg_test_skcipher,
  4578. .suite = {
  4579. .cipher = __VECS(aes_tv_template)
  4580. }
  4581. }, {
  4582. #endif
  4583. .alg = "ecdh-nist-p192",
  4584. .test = alg_test_kpp,
  4585. .suite = {
  4586. .kpp = __VECS(ecdh_p192_tv_template)
  4587. }
  4588. }, {
  4589. .alg = "ecdh-nist-p256",
  4590. .test = alg_test_kpp,
  4591. .fips_allowed = 1,
  4592. .suite = {
  4593. .kpp = __VECS(ecdh_p256_tv_template)
  4594. }
  4595. }, {
  4596. .alg = "ecdh-nist-p384",
  4597. .test = alg_test_kpp,
  4598. .fips_allowed = 1,
  4599. .suite = {
  4600. .kpp = __VECS(ecdh_p384_tv_template)
  4601. }
  4602. }, {
  4603. .alg = "ecdsa-nist-p192",
  4604. .test = alg_test_akcipher,
  4605. .suite = {
  4606. .akcipher = __VECS(ecdsa_nist_p192_tv_template)
  4607. }
  4608. }, {
  4609. .alg = "ecdsa-nist-p256",
  4610. .test = alg_test_akcipher,
  4611. .suite = {
  4612. .akcipher = __VECS(ecdsa_nist_p256_tv_template)
  4613. }
  4614. }, {
  4615. .alg = "ecdsa-nist-p384",
  4616. .test = alg_test_akcipher,
  4617. .suite = {
  4618. .akcipher = __VECS(ecdsa_nist_p384_tv_template)
  4619. }
  4620. }, {
  4621. .alg = "ecrdsa",
  4622. .test = alg_test_akcipher,
  4623. .suite = {
  4624. .akcipher = __VECS(ecrdsa_tv_template)
  4625. }
  4626. }, {
  4627. .alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
  4628. .test = alg_test_aead,
  4629. .fips_allowed = 1,
  4630. .suite = {
  4631. .aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
  4632. }
  4633. }, {
  4634. .alg = "essiv(cbc(aes),sha256)",
  4635. .test = alg_test_skcipher,
  4636. .fips_allowed = 1,
  4637. .suite = {
  4638. .cipher = __VECS(essiv_aes_cbc_tv_template)
  4639. }
  4640. }, {
  4641. #if IS_ENABLED(CONFIG_CRYPTO_DH_RFC7919_GROUPS)
  4642. .alg = "ffdhe2048(dh)",
  4643. .test = alg_test_kpp,
  4644. .fips_allowed = 1,
  4645. .suite = {
  4646. .kpp = __VECS(ffdhe2048_dh_tv_template)
  4647. }
  4648. }, {
  4649. .alg = "ffdhe3072(dh)",
  4650. .test = alg_test_kpp,
  4651. .fips_allowed = 1,
  4652. .suite = {
  4653. .kpp = __VECS(ffdhe3072_dh_tv_template)
  4654. }
  4655. }, {
  4656. .alg = "ffdhe4096(dh)",
  4657. .test = alg_test_kpp,
  4658. .fips_allowed = 1,
  4659. .suite = {
  4660. .kpp = __VECS(ffdhe4096_dh_tv_template)
  4661. }
  4662. }, {
  4663. .alg = "ffdhe6144(dh)",
  4664. .test = alg_test_kpp,
  4665. .fips_allowed = 1,
  4666. .suite = {
  4667. .kpp = __VECS(ffdhe6144_dh_tv_template)
  4668. }
  4669. }, {
  4670. .alg = "ffdhe8192(dh)",
  4671. .test = alg_test_kpp,
  4672. .fips_allowed = 1,
  4673. .suite = {
  4674. .kpp = __VECS(ffdhe8192_dh_tv_template)
  4675. }
  4676. }, {
  4677. #endif /* CONFIG_CRYPTO_DH_RFC7919_GROUPS */
  4678. .alg = "gcm(aes)",
  4679. .generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
  4680. .test = alg_test_aead,
  4681. .fips_allowed = 1,
  4682. .suite = {
  4683. .aead = __VECS(aes_gcm_tv_template)
  4684. }
  4685. }, {
  4686. .alg = "gcm(aria)",
  4687. .generic_driver = "gcm_base(ctr(aria-generic),ghash-generic)",
  4688. .test = alg_test_aead,
  4689. .suite = {
  4690. .aead = __VECS(aria_gcm_tv_template)
  4691. }
  4692. }, {
  4693. .alg = "gcm(sm4)",
  4694. .generic_driver = "gcm_base(ctr(sm4-generic),ghash-generic)",
  4695. .test = alg_test_aead,
  4696. .suite = {
  4697. .aead = __VECS(sm4_gcm_tv_template)
  4698. }
  4699. }, {
  4700. .alg = "ghash",
  4701. .test = alg_test_hash,
  4702. .fips_allowed = 1,
  4703. .suite = {
  4704. .hash = __VECS(ghash_tv_template)
  4705. }
  4706. }, {
  4707. .alg = "hctr2(aes)",
  4708. .generic_driver =
  4709. "hctr2_base(xctr(aes-generic),polyval-generic)",
  4710. .test = alg_test_skcipher,
  4711. .suite = {
  4712. .cipher = __VECS(aes_hctr2_tv_template)
  4713. }
  4714. }, {
  4715. .alg = "hmac(md5)",
  4716. .test = alg_test_hash,
  4717. .suite = {
  4718. .hash = __VECS(hmac_md5_tv_template)
  4719. }
  4720. }, {
  4721. .alg = "hmac(rmd160)",
  4722. .test = alg_test_hash,
  4723. .suite = {
  4724. .hash = __VECS(hmac_rmd160_tv_template)
  4725. }
  4726. }, {
  4727. .alg = "hmac(sha1)",
  4728. .test = alg_test_hash,
  4729. .fips_allowed = 1,
  4730. .suite = {
  4731. .hash = __VECS(hmac_sha1_tv_template)
  4732. }
  4733. }, {
  4734. .alg = "hmac(sha224)",
  4735. .test = alg_test_hash,
  4736. .fips_allowed = 1,
  4737. .suite = {
  4738. .hash = __VECS(hmac_sha224_tv_template)
  4739. }
  4740. }, {
  4741. .alg = "hmac(sha256)",
  4742. .test = alg_test_hash,
  4743. .fips_allowed = 1,
  4744. .suite = {
  4745. .hash = __VECS(hmac_sha256_tv_template)
  4746. }
  4747. }, {
  4748. .alg = "hmac(sha3-224)",
  4749. .test = alg_test_hash,
  4750. .fips_allowed = 1,
  4751. .suite = {
  4752. .hash = __VECS(hmac_sha3_224_tv_template)
  4753. }
  4754. }, {
  4755. .alg = "hmac(sha3-256)",
  4756. .test = alg_test_hash,
  4757. .fips_allowed = 1,
  4758. .suite = {
  4759. .hash = __VECS(hmac_sha3_256_tv_template)
  4760. }
  4761. }, {
  4762. .alg = "hmac(sha3-384)",
  4763. .test = alg_test_hash,
  4764. .fips_allowed = 1,
  4765. .suite = {
  4766. .hash = __VECS(hmac_sha3_384_tv_template)
  4767. }
  4768. }, {
  4769. .alg = "hmac(sha3-512)",
  4770. .test = alg_test_hash,
  4771. .fips_allowed = 1,
  4772. .suite = {
  4773. .hash = __VECS(hmac_sha3_512_tv_template)
  4774. }
  4775. }, {
  4776. .alg = "hmac(sha384)",
  4777. .test = alg_test_hash,
  4778. .fips_allowed = 1,
  4779. .suite = {
  4780. .hash = __VECS(hmac_sha384_tv_template)
  4781. }
  4782. }, {
  4783. .alg = "hmac(sha512)",
  4784. .test = alg_test_hash,
  4785. .fips_allowed = 1,
  4786. .suite = {
  4787. .hash = __VECS(hmac_sha512_tv_template)
  4788. }
  4789. }, {
  4790. .alg = "hmac(sm3)",
  4791. .test = alg_test_hash,
  4792. .suite = {
  4793. .hash = __VECS(hmac_sm3_tv_template)
  4794. }
  4795. }, {
  4796. .alg = "hmac(streebog256)",
  4797. .test = alg_test_hash,
  4798. .suite = {
  4799. .hash = __VECS(hmac_streebog256_tv_template)
  4800. }
  4801. }, {
  4802. .alg = "hmac(streebog512)",
  4803. .test = alg_test_hash,
  4804. .suite = {
  4805. .hash = __VECS(hmac_streebog512_tv_template)
  4806. }
  4807. }, {
  4808. .alg = "jitterentropy_rng",
  4809. .fips_allowed = 1,
  4810. .test = alg_test_null,
  4811. }, {
  4812. .alg = "kw(aes)",
  4813. .test = alg_test_skcipher,
  4814. .fips_allowed = 1,
  4815. .suite = {
  4816. .cipher = __VECS(aes_kw_tv_template)
  4817. }
  4818. }, {
  4819. .alg = "lrw(aes)",
  4820. .generic_driver = "lrw(ecb(aes-generic))",
  4821. .test = alg_test_skcipher,
  4822. .suite = {
  4823. .cipher = __VECS(aes_lrw_tv_template)
  4824. }
  4825. }, {
  4826. .alg = "lrw(camellia)",
  4827. .generic_driver = "lrw(ecb(camellia-generic))",
  4828. .test = alg_test_skcipher,
  4829. .suite = {
  4830. .cipher = __VECS(camellia_lrw_tv_template)
  4831. }
  4832. }, {
  4833. .alg = "lrw(cast6)",
  4834. .generic_driver = "lrw(ecb(cast6-generic))",
  4835. .test = alg_test_skcipher,
  4836. .suite = {
  4837. .cipher = __VECS(cast6_lrw_tv_template)
  4838. }
  4839. }, {
  4840. .alg = "lrw(serpent)",
  4841. .generic_driver = "lrw(ecb(serpent-generic))",
  4842. .test = alg_test_skcipher,
  4843. .suite = {
  4844. .cipher = __VECS(serpent_lrw_tv_template)
  4845. }
  4846. }, {
  4847. .alg = "lrw(twofish)",
  4848. .generic_driver = "lrw(ecb(twofish-generic))",
  4849. .test = alg_test_skcipher,
  4850. .suite = {
  4851. .cipher = __VECS(tf_lrw_tv_template)
  4852. }
  4853. }, {
  4854. .alg = "lz4",
  4855. .test = alg_test_comp,
  4856. .fips_allowed = 1,
  4857. .suite = {
  4858. .comp = {
  4859. .comp = __VECS(lz4_comp_tv_template),
  4860. .decomp = __VECS(lz4_decomp_tv_template)
  4861. }
  4862. }
  4863. }, {
  4864. .alg = "lz4hc",
  4865. .test = alg_test_comp,
  4866. .fips_allowed = 1,
  4867. .suite = {
  4868. .comp = {
  4869. .comp = __VECS(lz4hc_comp_tv_template),
  4870. .decomp = __VECS(lz4hc_decomp_tv_template)
  4871. }
  4872. }
  4873. }, {
  4874. .alg = "lzo",
  4875. .test = alg_test_comp,
  4876. .fips_allowed = 1,
  4877. .suite = {
  4878. .comp = {
  4879. .comp = __VECS(lzo_comp_tv_template),
  4880. .decomp = __VECS(lzo_decomp_tv_template)
  4881. }
  4882. }
  4883. }, {
  4884. .alg = "lzo-rle",
  4885. .test = alg_test_comp,
  4886. .fips_allowed = 1,
  4887. .suite = {
  4888. .comp = {
  4889. .comp = __VECS(lzorle_comp_tv_template),
  4890. .decomp = __VECS(lzorle_decomp_tv_template)
  4891. }
  4892. }
  4893. }, {
  4894. .alg = "md4",
  4895. .test = alg_test_hash,
  4896. .suite = {
  4897. .hash = __VECS(md4_tv_template)
  4898. }
  4899. }, {
  4900. .alg = "md5",
  4901. .test = alg_test_hash,
  4902. .suite = {
  4903. .hash = __VECS(md5_tv_template)
  4904. }
  4905. }, {
  4906. .alg = "michael_mic",
  4907. .test = alg_test_hash,
  4908. .suite = {
  4909. .hash = __VECS(michael_mic_tv_template)
  4910. }
  4911. }, {
  4912. .alg = "nhpoly1305",
  4913. .test = alg_test_hash,
  4914. .suite = {
  4915. .hash = __VECS(nhpoly1305_tv_template)
  4916. }
  4917. }, {
  4918. .alg = "ofb(aes)",
  4919. .test = alg_test_skcipher,
  4920. .fips_allowed = 1,
  4921. .suite = {
  4922. .cipher = __VECS(aes_ofb_tv_template)
  4923. }
  4924. }, {
  4925. /* Same as ofb(aes) except the key is stored in
  4926. * hardware secure memory which we reference by index
  4927. */
  4928. .alg = "ofb(paes)",
  4929. .test = alg_test_null,
  4930. .fips_allowed = 1,
  4931. }, {
  4932. .alg = "ofb(sm4)",
  4933. .test = alg_test_skcipher,
  4934. .suite = {
  4935. .cipher = __VECS(sm4_ofb_tv_template)
  4936. }
  4937. }, {
  4938. .alg = "pcbc(fcrypt)",
  4939. .test = alg_test_skcipher,
  4940. .suite = {
  4941. .cipher = __VECS(fcrypt_pcbc_tv_template)
  4942. }
  4943. }, {
  4944. .alg = "pkcs1pad(rsa,sha224)",
  4945. .test = alg_test_null,
  4946. .fips_allowed = 1,
  4947. }, {
  4948. .alg = "pkcs1pad(rsa,sha256)",
  4949. .test = alg_test_akcipher,
  4950. .fips_allowed = 1,
  4951. .suite = {
  4952. .akcipher = __VECS(pkcs1pad_rsa_tv_template)
  4953. }
  4954. }, {
  4955. .alg = "pkcs1pad(rsa,sha384)",
  4956. .test = alg_test_null,
  4957. .fips_allowed = 1,
  4958. }, {
  4959. .alg = "pkcs1pad(rsa,sha512)",
  4960. .test = alg_test_null,
  4961. .fips_allowed = 1,
  4962. }, {
  4963. .alg = "poly1305",
  4964. .test = alg_test_hash,
  4965. .suite = {
  4966. .hash = __VECS(poly1305_tv_template)
  4967. }
  4968. }, {
  4969. .alg = "polyval",
  4970. .test = alg_test_hash,
  4971. .suite = {
  4972. .hash = __VECS(polyval_tv_template)
  4973. }
  4974. }, {
  4975. .alg = "rfc3686(ctr(aes))",
  4976. .test = alg_test_skcipher,
  4977. .fips_allowed = 1,
  4978. .suite = {
  4979. .cipher = __VECS(aes_ctr_rfc3686_tv_template)
  4980. }
  4981. }, {
  4982. .alg = "rfc3686(ctr(sm4))",
  4983. .test = alg_test_skcipher,
  4984. .suite = {
  4985. .cipher = __VECS(sm4_ctr_rfc3686_tv_template)
  4986. }
  4987. }, {
  4988. .alg = "rfc4106(gcm(aes))",
  4989. .generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
  4990. .test = alg_test_aead,
  4991. .fips_allowed = 1,
  4992. .suite = {
  4993. .aead = {
  4994. ____VECS(aes_gcm_rfc4106_tv_template),
  4995. .einval_allowed = 1,
  4996. .aad_iv = 1,
  4997. }
  4998. }
  4999. }, {
  5000. .alg = "rfc4309(ccm(aes))",
  5001. .generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
  5002. .test = alg_test_aead,
  5003. .fips_allowed = 1,
  5004. .suite = {
  5005. .aead = {
  5006. ____VECS(aes_ccm_rfc4309_tv_template),
  5007. .einval_allowed = 1,
  5008. .aad_iv = 1,
  5009. }
  5010. }
  5011. }, {
  5012. .alg = "rfc4543(gcm(aes))",
  5013. .generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
  5014. .test = alg_test_aead,
  5015. .suite = {
  5016. .aead = {
  5017. ____VECS(aes_gcm_rfc4543_tv_template),
  5018. .einval_allowed = 1,
  5019. .aad_iv = 1,
  5020. }
  5021. }
  5022. }, {
  5023. .alg = "rfc7539(chacha20,poly1305)",
  5024. .test = alg_test_aead,
  5025. .suite = {
  5026. .aead = __VECS(rfc7539_tv_template)
  5027. }
  5028. }, {
  5029. .alg = "rfc7539esp(chacha20,poly1305)",
  5030. .test = alg_test_aead,
  5031. .suite = {
  5032. .aead = {
  5033. ____VECS(rfc7539esp_tv_template),
  5034. .einval_allowed = 1,
  5035. .aad_iv = 1,
  5036. }
  5037. }
  5038. }, {
  5039. .alg = "rmd160",
  5040. .test = alg_test_hash,
  5041. .suite = {
  5042. .hash = __VECS(rmd160_tv_template)
  5043. }
  5044. }, {
  5045. .alg = "rsa",
  5046. .test = alg_test_akcipher,
  5047. .fips_allowed = 1,
  5048. .suite = {
  5049. .akcipher = __VECS(rsa_tv_template)
  5050. }
  5051. }, {
  5052. .alg = "sha1",
  5053. .test = alg_test_hash,
  5054. .fips_allowed = 1,
  5055. .suite = {
  5056. .hash = __VECS(sha1_tv_template)
  5057. }
  5058. }, {
  5059. .alg = "sha224",
  5060. .test = alg_test_hash,
  5061. .fips_allowed = 1,
  5062. .suite = {
  5063. .hash = __VECS(sha224_tv_template)
  5064. }
  5065. }, {
  5066. .alg = "sha256",
  5067. .test = alg_test_hash,
  5068. .fips_allowed = 1,
  5069. .suite = {
  5070. .hash = __VECS(sha256_tv_template)
  5071. }
  5072. }, {
  5073. .alg = "sha3-224",
  5074. .test = alg_test_hash,
  5075. .fips_allowed = 1,
  5076. .suite = {
  5077. .hash = __VECS(sha3_224_tv_template)
  5078. }
  5079. }, {
  5080. .alg = "sha3-256",
  5081. .test = alg_test_hash,
  5082. .fips_allowed = 1,
  5083. .suite = {
  5084. .hash = __VECS(sha3_256_tv_template)
  5085. }
  5086. }, {
  5087. .alg = "sha3-384",
  5088. .test = alg_test_hash,
  5089. .fips_allowed = 1,
  5090. .suite = {
  5091. .hash = __VECS(sha3_384_tv_template)
  5092. }
  5093. }, {
  5094. .alg = "sha3-512",
  5095. .test = alg_test_hash,
  5096. .fips_allowed = 1,
  5097. .suite = {
  5098. .hash = __VECS(sha3_512_tv_template)
  5099. }
  5100. }, {
  5101. .alg = "sha384",
  5102. .test = alg_test_hash,
  5103. .fips_allowed = 1,
  5104. .suite = {
  5105. .hash = __VECS(sha384_tv_template)
  5106. }
  5107. }, {
  5108. .alg = "sha512",
  5109. .test = alg_test_hash,
  5110. .fips_allowed = 1,
  5111. .suite = {
  5112. .hash = __VECS(sha512_tv_template)
  5113. }
  5114. }, {
  5115. .alg = "sm2",
  5116. .test = alg_test_akcipher,
  5117. .suite = {
  5118. .akcipher = __VECS(sm2_tv_template)
  5119. }
  5120. }, {
  5121. .alg = "sm3",
  5122. .test = alg_test_hash,
  5123. .suite = {
  5124. .hash = __VECS(sm3_tv_template)
  5125. }
  5126. }, {
  5127. .alg = "streebog256",
  5128. .test = alg_test_hash,
  5129. .suite = {
  5130. .hash = __VECS(streebog256_tv_template)
  5131. }
  5132. }, {
  5133. .alg = "streebog512",
  5134. .test = alg_test_hash,
  5135. .suite = {
  5136. .hash = __VECS(streebog512_tv_template)
  5137. }
  5138. }, {
  5139. .alg = "vmac64(aes)",
  5140. .test = alg_test_hash,
  5141. .suite = {
  5142. .hash = __VECS(vmac64_aes_tv_template)
  5143. }
  5144. }, {
  5145. .alg = "wp256",
  5146. .test = alg_test_hash,
  5147. .suite = {
  5148. .hash = __VECS(wp256_tv_template)
  5149. }
  5150. }, {
  5151. .alg = "wp384",
  5152. .test = alg_test_hash,
  5153. .suite = {
  5154. .hash = __VECS(wp384_tv_template)
  5155. }
  5156. }, {
  5157. .alg = "wp512",
  5158. .test = alg_test_hash,
  5159. .suite = {
  5160. .hash = __VECS(wp512_tv_template)
  5161. }
  5162. }, {
  5163. .alg = "xcbc(aes)",
  5164. .test = alg_test_hash,
  5165. .suite = {
  5166. .hash = __VECS(aes_xcbc128_tv_template)
  5167. }
  5168. }, {
  5169. .alg = "xchacha12",
  5170. .test = alg_test_skcipher,
  5171. .suite = {
  5172. .cipher = __VECS(xchacha12_tv_template)
  5173. },
  5174. }, {
  5175. .alg = "xchacha20",
  5176. .test = alg_test_skcipher,
  5177. .suite = {
  5178. .cipher = __VECS(xchacha20_tv_template)
  5179. },
  5180. }, {
  5181. .alg = "xctr(aes)",
  5182. .test = alg_test_skcipher,
  5183. .suite = {
  5184. .cipher = __VECS(aes_xctr_tv_template)
  5185. }
  5186. }, {
  5187. .alg = "xts(aes)",
  5188. .generic_driver = "xts(ecb(aes-generic))",
  5189. .test = alg_test_skcipher,
  5190. .fips_allowed = 1,
  5191. .suite = {
  5192. .cipher = __VECS(aes_xts_tv_template)
  5193. }
  5194. }, {
  5195. .alg = "xts(camellia)",
  5196. .generic_driver = "xts(ecb(camellia-generic))",
  5197. .test = alg_test_skcipher,
  5198. .suite = {
  5199. .cipher = __VECS(camellia_xts_tv_template)
  5200. }
  5201. }, {
  5202. .alg = "xts(cast6)",
  5203. .generic_driver = "xts(ecb(cast6-generic))",
  5204. .test = alg_test_skcipher,
  5205. .suite = {
  5206. .cipher = __VECS(cast6_xts_tv_template)
  5207. }
  5208. }, {
  5209. /* Same as xts(aes) except the key is stored in
  5210. * hardware secure memory which we reference by index
  5211. */
  5212. .alg = "xts(paes)",
  5213. .test = alg_test_null,
  5214. .fips_allowed = 1,
  5215. }, {
  5216. .alg = "xts(serpent)",
  5217. .generic_driver = "xts(ecb(serpent-generic))",
  5218. .test = alg_test_skcipher,
  5219. .suite = {
  5220. .cipher = __VECS(serpent_xts_tv_template)
  5221. }
  5222. }, {
  5223. .alg = "xts(twofish)",
  5224. .generic_driver = "xts(ecb(twofish-generic))",
  5225. .test = alg_test_skcipher,
  5226. .suite = {
  5227. .cipher = __VECS(tf_xts_tv_template)
  5228. }
  5229. }, {
  5230. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  5231. .alg = "xts-paes-s390",
  5232. .fips_allowed = 1,
  5233. .test = alg_test_skcipher,
  5234. .suite = {
  5235. .cipher = __VECS(aes_xts_tv_template)
  5236. }
  5237. }, {
  5238. #endif
  5239. .alg = "xts4096(paes)",
  5240. .test = alg_test_null,
  5241. .fips_allowed = 1,
  5242. }, {
  5243. .alg = "xts512(paes)",
  5244. .test = alg_test_null,
  5245. .fips_allowed = 1,
  5246. }, {
  5247. .alg = "xxhash64",
  5248. .test = alg_test_hash,
  5249. .fips_allowed = 1,
  5250. .suite = {
  5251. .hash = __VECS(xxhash64_tv_template)
  5252. }
  5253. }, {
  5254. .alg = "zlib-deflate",
  5255. .test = alg_test_comp,
  5256. .fips_allowed = 1,
  5257. .suite = {
  5258. .comp = {
  5259. .comp = __VECS(zlib_deflate_comp_tv_template),
  5260. .decomp = __VECS(zlib_deflate_decomp_tv_template)
  5261. }
  5262. }
  5263. }, {
  5264. .alg = "zstd",
  5265. .test = alg_test_comp,
  5266. .fips_allowed = 1,
  5267. .suite = {
  5268. .comp = {
  5269. .comp = __VECS(zstd_comp_tv_template),
  5270. .decomp = __VECS(zstd_decomp_tv_template)
  5271. }
  5272. }
  5273. }
  5274. };
  5275. static void alg_check_test_descs_order(void)
  5276. {
  5277. int i;
  5278. for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
  5279. int diff = strcmp(alg_test_descs[i - 1].alg,
  5280. alg_test_descs[i].alg);
  5281. if (WARN_ON(diff > 0)) {
  5282. pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
  5283. alg_test_descs[i - 1].alg,
  5284. alg_test_descs[i].alg);
  5285. }
  5286. if (WARN_ON(diff == 0)) {
  5287. pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
  5288. alg_test_descs[i].alg);
  5289. }
  5290. }
  5291. }
  5292. static void alg_check_testvec_configs(void)
  5293. {
  5294. int i;
  5295. for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
  5296. WARN_ON(!valid_testvec_config(
  5297. &default_cipher_testvec_configs[i]));
  5298. for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
  5299. WARN_ON(!valid_testvec_config(
  5300. &default_hash_testvec_configs[i]));
  5301. }
  5302. static void testmgr_onetime_init(void)
  5303. {
  5304. alg_check_test_descs_order();
  5305. alg_check_testvec_configs();
  5306. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  5307. pr_warn("alg: extra crypto tests enabled. This is intended for developer use only.\n");
  5308. #endif
  5309. }
  5310. static int alg_find_test(const char *alg)
  5311. {
  5312. int start = 0;
  5313. int end = ARRAY_SIZE(alg_test_descs);
  5314. while (start < end) {
  5315. int i = (start + end) / 2;
  5316. int diff = strcmp(alg_test_descs[i].alg, alg);
  5317. if (diff > 0) {
  5318. end = i;
  5319. continue;
  5320. }
  5321. if (diff < 0) {
  5322. start = i + 1;
  5323. continue;
  5324. }
  5325. return i;
  5326. }
  5327. return -1;
  5328. }
/*
 * Log that @alg (implemented by @driver) is unavailable because FIPS mode
 * forbids it, and return -ECANCELED so the caller can propagate the refusal.
 */
static int alg_fips_disabled(const char *driver, const char *alg)
{
	pr_info("alg: %s (%s) is disabled due to FIPS\n", alg, driver);

	return -ECANCELED;
}
/**
 * alg_test() - run the self-tests for an algorithm implementation
 * @driver: name of the specific implementation, e.g. "aes-ni"
 * @alg: name of the algorithm, e.g. "aes"
 * @type: algorithm type flags (CRYPTO_ALG_*)
 * @mask: mask of relevant type flags
 *
 * Looks up the test description(s) for @alg and/or @driver in
 * alg_test_descs[] and runs them.  Returns 0 on success (including "no
 * test available"), or a negative errno on failure or FIPS refusal.
 * In FIPS mode (or with panic_on_fail), a test failure panics the kernel.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* Self-tests can be disabled via the "notests" parameter, but never
	 * in FIPS mode, where they are mandatory. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	/* Lazily run testmgr's internal consistency checks exactly once. */
	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Bare (single-block) ciphers are tested through their
		 * ecb() template entry, so build "ecb(<alg>)" first. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* An entry may exist for the generic algorithm name, the specific
	 * driver name, or both; run whichever tests are found. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled) {
		if (j >= 0 && !alg_test_descs[j].fips_allowed)
			return -EINVAL;

		if (i >= 0 && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;
	}

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (rc) {
		if (fips_enabled || panic_on_fail) {
			fips_fail_notify();
			panic("alg: self-tests for %s (%s) failed in %s mode!\n",
			      driver, alg,
			      fips_enabled ? "fips" : "panic_on_fail");
		}
		/* Always log the failure; add a backtrace via WARN() unless
		 * the failure was just a missing algorithm (-ENOENT). */
		pr_warn("alg: self-tests for %s using %s failed (rc=%d)",
			alg, driver, rc);
		WARN(rc != -ENOENT,
		     "alg: self-tests for %s using %s failed (rc=%d)",
		     alg, driver, rc);
	} else {
		if (fips_enabled)
			pr_info("alg: self-tests for %s (%s) passed\n",
				driver, alg);
	}

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);

	/* FIPS-internal algorithms without a test must not be exposed. */
	if (type & CRYPTO_ALG_FIPS_INTERNAL)
		return alg_fips_disabled(driver, alg);

	return 0;
non_fips_alg:
	return alg_fips_disabled(driver, alg);
}
  5401. #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
  5402. EXPORT_SYMBOL_GPL(alg_test);