- // SPDX-License-Identifier: GPL-2.0
- /*
- * K3 SA2UL crypto accelerator driver
- *
- * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
- *
- * Authors: Keerthy
- * Vitaly Andrianov
- * Tero Kristo
- */
- #include <linux/bitfield.h>
- #include <linux/clk.h>
- #include <linux/dma-mapping.h>
- #include <linux/dmaengine.h>
- #include <linux/dmapool.h>
- #include <linux/kernel.h>
- #include <linux/module.h>
- #include <linux/of_device.h>
- #include <linux/platform_device.h>
- #include <linux/pm_runtime.h>
- #include <crypto/aes.h>
- #include <crypto/authenc.h>
- #include <crypto/des.h>
- #include <crypto/internal/aead.h>
- #include <crypto/internal/hash.h>
- #include <crypto/internal/skcipher.h>
- #include <crypto/scatterwalk.h>
- #include <crypto/sha1.h>
- #include <crypto/sha2.h>
- #include "sa2ul.h"
- /* Byte offset for key in encryption security context */
- #define SC_ENC_KEY_OFFSET (1 + 27 + 4)
- /* Byte offset for Aux-1 in encryption security context */
- #define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)
- #define SA_CMDL_UPD_ENC 0x0001
- #define SA_CMDL_UPD_AUTH 0x0002
- #define SA_CMDL_UPD_ENC_IV 0x0004
- #define SA_CMDL_UPD_AUTH_IV 0x0008
- #define SA_CMDL_UPD_AUX_KEY 0x0010
- #define SA_AUTH_SUBKEY_LEN 16
- #define SA_CMDL_PAYLOAD_LENGTH_MASK 0xFFFF
- #define SA_CMDL_SOP_BYPASS_LEN_MASK 0xFF000000
- #define MODE_CONTROL_BYTES 27
- #define SA_HASH_PROCESSING 0
- #define SA_CRYPTO_PROCESSING 0
- #define SA_UPLOAD_HASH_TO_TLR BIT(6)
- #define SA_SW0_FLAGS_MASK 0xF0000
- #define SA_SW0_CMDL_INFO_MASK 0x1F00000
- #define SA_SW0_CMDL_PRESENT BIT(4)
- #define SA_SW0_ENG_ID_MASK 0x3E000000
- #define SA_SW0_DEST_INFO_PRESENT BIT(30)
- #define SA_SW2_EGRESS_LENGTH 0xFF000000
- #define SA_BASIC_HASH 0x10
- #define SHA256_DIGEST_WORDS 8
- /* Make 32-bit word from 4 bytes */
- #define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
- ((b2) << 8) | (b3))
- /* size of SCCTL structure in bytes */
- #define SA_SCCTL_SZ 16
- /* Max Authentication tag size */
- #define SA_MAX_AUTH_TAG_SZ 64
- enum sa_algo_id {
- SA_ALG_CBC_AES = 0,
- SA_ALG_EBC_AES,
- SA_ALG_CBC_DES3,
- SA_ALG_ECB_DES3,
- SA_ALG_SHA1,
- SA_ALG_SHA256,
- SA_ALG_SHA512,
- SA_ALG_AUTHENC_SHA1_AES,
- SA_ALG_AUTHENC_SHA256_AES,
- };
- struct sa_match_data {
- u8 priv;
- u8 priv_id;
- u32 supported_algos;
- };
- static struct device *sa_k3_dev;
- /**
- * struct sa_cmdl_cfg - Command label configuration descriptor
- * @aalg: authentication algorithm ID
- * @enc_eng_id: Encryption Engine ID supported by the SA hardware
- * @auth_eng_id: Authentication Engine ID
- * @iv_size: Initialization Vector size
- * @akey: Authentication key
- * @akey_len: Authentication key length
- * @enc: True, if this is an encode request
- */
- struct sa_cmdl_cfg {
- int aalg;
- u8 enc_eng_id;
- u8 auth_eng_id;
- u8 iv_size;
- const u8 *akey;
- u16 akey_len;
- bool enc;
- };
- /**
- * struct algo_data - Crypto algorithm specific data
- * @enc_eng: Encryption engine info structure
- * @auth_eng: Authentication engine info structure
- * @auth_ctrl: Authentication control word
- * @hash_size: Size of digest
- * @iv_idx: iv index in psdata
- * @iv_out_size: iv out size
- * @ealg_id: Encryption Algorithm ID
- * @aalg_id: Authentication algorithm ID
- * @mci_enc: Mode Control Instruction for Encryption algorithm
- * @mci_dec: Mode Control Instruction for Decryption
- * @inv_key: Whether the encryption algorithm demands key inversion
- * @ctx: Pointer to the algorithm context
- * @keyed_mac: Whether the authentication algorithm has key
- * @prep_iopad: Function pointer to generate intermediate ipad/opad
- */
- struct algo_data {
- struct sa_eng_info enc_eng;
- struct sa_eng_info auth_eng;
- u8 auth_ctrl;
- u8 hash_size;
- u8 iv_idx;
- u8 iv_out_size;
- u8 ealg_id;
- u8 aalg_id;
- u8 *mci_enc;
- u8 *mci_dec;
- bool inv_key;
- struct sa_tfm_ctx *ctx;
- bool keyed_mac;
- void (*prep_iopad)(struct algo_data *algo, const u8 *key,
- u16 key_sz, __be32 *ipad, __be32 *opad);
- };
- /**
- * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
- * @type: Type of the crypto algorithm.
- * @alg: Union of crypto algorithm definitions.
- * @registered: Flag indicating if the crypto algorithm is already registered
- */
- struct sa_alg_tmpl {
- u32 type; /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
- union {
- struct skcipher_alg skcipher;
- struct ahash_alg ahash;
- struct aead_alg aead;
- } alg;
- bool registered;
- };
- /**
- * struct sa_mapped_sg: scatterlist information for tx and rx
- * @mapped: Set to true if the @sgt is mapped
- * @dir: mapping direction used for @sgt
- * @split_sg: Set if the sg is split and needs to be freed up
- * @static_sg: Static scatterlist entry for overriding data
- * @sgt: scatterlist table for DMA API use
- */
- struct sa_mapped_sg {
- bool mapped;
- enum dma_data_direction dir;
- struct scatterlist static_sg;
- struct scatterlist *split_sg;
- struct sg_table sgt;
- };
- /**
- * struct sa_rx_data: RX Packet miscellaneous data placeholder
- * @req: crypto request data pointer
- * @ddev: pointer to the DMA device
- * @tx_in: dma_async_tx_descriptor pointer for rx channel
- * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
- * @enc: Flag indicating either encryption or decryption
- * @enc_iv_size: Initialisation vector size
- * @iv_idx: Initialisation vector index
- */
- struct sa_rx_data {
- void *req;
- struct device *ddev;
- struct dma_async_tx_descriptor *tx_in;
- struct sa_mapped_sg mapped_sg[2];
- u8 enc;
- u8 enc_iv_size;
- u8 iv_idx;
- };
- /**
- * struct sa_req: SA request definition
- * @dev: device for the request
- * @size: total data to be transmitted via DMA
- * @enc_offset: offset of cipher data
- * @enc_size: data to be passed to cipher engine
- * @enc_iv: cipher IV
- * @auth_offset: offset of the authentication data
- * @auth_size: size of the authentication data
- * @auth_iv: authentication IV
- * @type: algorithm type for the request
- * @cmdl: command label pointer
- * @base: pointer to the base request
- * @ctx: pointer to the algorithm context data
- * @enc: true if this is an encode request
- * @src: source data
- * @dst: destination data
- * @callback: DMA callback for the request
- * @mdata_size: metadata size passed to DMA
- */
- struct sa_req {
- struct device *dev;
- u16 size;
- u8 enc_offset;
- u16 enc_size;
- u8 *enc_iv;
- u8 auth_offset;
- u16 auth_size;
- u8 *auth_iv;
- u32 type;
- u32 *cmdl;
- struct crypto_async_request *base;
- struct sa_tfm_ctx *ctx;
- bool enc;
- struct scatterlist *src;
- struct scatterlist *dst;
- dma_async_tx_callback callback;
- u16 mdata_size;
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For CBC (Cipher Block Chaining) mode for encryption
- */
- static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
- { 0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For CBC (Cipher Block Chaining) mode for decryption
- */
- static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
- { 0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For CBC (Cipher Block Chaining) mode for encryption
- */
- static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
- { 0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For CBC (Cipher Block Chaining) mode for decryption
- */
- static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
- { 0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For ECB (Electronic Code Book) mode for encryption
- */
- static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
- { 0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for various Key lengths 128, 192, 256
- * For ECB (Electronic Code Book) mode for decryption
- */
- static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
- { 0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- { 0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
- };
- /*
- * Mode Control Instructions for the 3DES algorithm,
- * for CBC (Cipher Block Chaining) and ECB modes,
- * encryption and decryption respectively
- */
- static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
- 0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00,
- };
- static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
- 0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00,
- };
- static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
- 0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00,
- };
- static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
- 0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00,
- };
- /*
- * Perform 16-byte (128-bit) swizzling.
- * The SA2UL expects the security context in little-endian format,
- * and the bus width is 128 bits (16 bytes).
- * Hence swap 16 bytes at a time from higher to lower address.
- */
- static void sa_swiz_128(u8 *in, u16 len)
- {
- u8 data[16];
- int i, j;
- for (i = 0; i < len; i += 16) {
- memcpy(data, &in[i], 16);
- for (j = 0; j < 16; j++)
- in[i + j] = data[15 - j];
- }
- }
- /* Prepare the ipad from the key as per the HMAC algorithm (step 1) */
- static void prepare_kipad(u8 *k_ipad, const u8 *key, u16 key_sz)
- {
- int i;
- for (i = 0; i < key_sz; i++)
- k_ipad[i] = key[i] ^ 0x36;
- /* Instead of XOR with 0 */
- for (; i < SHA1_BLOCK_SIZE; i++)
- k_ipad[i] = 0x36;
- }
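- /* Prepare the opad from the key: XOR with 0x5c and pad the rest of the block */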
- static void prepare_kopad(u8 *k_opad, const u8 *key, u16 key_sz)
- {
- int i;
- for (i = 0; i < key_sz; i++)
- k_opad[i] = key[i] ^ 0x5c;
- /* Instead of XOR with 0 */
- for (; i < SHA1_BLOCK_SIZE; i++)
- k_opad[i] = 0x5c;
- }
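- /* Export the partial hash state of the fallback shash as big-endian words */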
- static void sa_export_shash(void *state, struct shash_desc *hash,
- int digest_size, __be32 *out)
- {
- struct sha1_state *sha1;
- struct sha256_state *sha256;
- u32 *result;
- switch (digest_size) {
- case SHA1_DIGEST_SIZE:
- sha1 = state;
- result = sha1->state;
- break;
- case SHA256_DIGEST_SIZE:
- sha256 = state;
- result = sha256->state;
- break;
- default:
- dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
- digest_size);
- return;
- }
- crypto_shash_export(hash, state);
- cpu_to_be32_array(out, result, digest_size / 4);
- }
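- /* Generate the intermediate ipad/opad hashes consumed by the hardware for HMAC */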
- static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
- u16 key_sz, __be32 *ipad, __be32 *opad)
- {
- SHASH_DESC_ON_STACK(shash, data->ctx->shash);
- int block_size = crypto_shash_blocksize(data->ctx->shash);
- int digest_size = crypto_shash_digestsize(data->ctx->shash);
- union {
- struct sha1_state sha1;
- struct sha256_state sha256;
- u8 k_pad[SHA1_BLOCK_SIZE];
- } sha;
- shash->tfm = data->ctx->shash;
- prepare_kipad(sha.k_pad, key, key_sz);
- crypto_shash_init(shash);
- crypto_shash_update(shash, sha.k_pad, block_size);
- sa_export_shash(&sha, shash, digest_size, ipad);
- prepare_kopad(sha.k_pad, key, key_sz);
- crypto_shash_init(shash);
- crypto_shash_update(shash, sha.k_pad, block_size);
- sa_export_shash(&sha, shash, digest_size, opad);
- memzero_explicit(&sha, sizeof(sha));
- }
- /* Derive the inverse key used in AES-CBC decryption operation */
- static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
- {
- struct crypto_aes_ctx ctx;
- int key_pos;
- if (aes_expandkey(&ctx, key, key_sz)) {
- dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
- return -EINVAL;
- }
- /* Workaround to get the right inverse for AES_KEYSIZE_192 keys */
- if (key_sz == AES_KEYSIZE_192) {
- ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
- ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
- }
- /* Based on the crypto_aes_expand_key logic */
- switch (key_sz) {
- case AES_KEYSIZE_128:
- case AES_KEYSIZE_192:
- key_pos = key_sz + 24;
- break;
- case AES_KEYSIZE_256:
- key_pos = key_sz + 24 - 4;
- break;
- default:
- dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
- return -EINVAL;
- }
- memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
- return 0;
- }
- /* Set Security context for the encryption engine */
- static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
- u8 enc, u8 *sc_buf)
- {
- const u8 *mci = NULL;
- /* Set Encryption mode selector to crypto processing */
- sc_buf[0] = SA_CRYPTO_PROCESSING;
- if (enc)
- mci = ad->mci_enc;
- else
- mci = ad->mci_dec;
- /* Set the mode control instructions in security context */
- if (mci)
- memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);
- /* For AES-CBC decryption get the inverse key */
- if (ad->inv_key && !enc) {
- if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
- return -EINVAL;
- /* For all other cases: key is used */
- } else {
- memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
- }
- return 0;
- }
- /* Set Security context for the authentication engine */
- static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
- u8 *sc_buf)
- {
- __be32 *ipad = (void *)(sc_buf + 32);
- __be32 *opad = (void *)(sc_buf + 64);
- /* Set Authentication mode selector to hash processing */
- sc_buf[0] = SA_HASH_PROCESSING;
- /* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
- sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
- sc_buf[1] |= ad->auth_ctrl;
- /* Copy the keys or ipad/opad */
- if (ad->keyed_mac)
- ad->prep_iopad(ad, key, key_sz, ipad, opad);
- else {
- /* basic hash */
- sc_buf[1] |= SA_BASIC_HASH;
- }
- }
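- /* Copy an 8-byte or 16-byte IV into the command label as big-endian words */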
- static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
- {
- int j;
- for (j = 0; j < ((size16) ? 4 : 2); j++) {
- *out = cpu_to_be32(*((u32 *)iv));
- iv += 4;
- out++;
- }
- }
- /* Format general command label */
- static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
- struct sa_cmdl_upd_info *upd_info)
- {
- u8 enc_offset = 0, auth_offset = 0, total = 0;
- u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
- u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
- u32 *word_ptr = (u32 *)cmdl;
- int i;
- /* Clear the command label */
- memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));
- /* Initialize the command update structure */
- memzero_explicit(upd_info, sizeof(*upd_info));
- if (cfg->enc_eng_id && cfg->auth_eng_id) {
- if (cfg->enc) {
- auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
- enc_next_eng = cfg->auth_eng_id;
- if (cfg->iv_size)
- auth_offset += cfg->iv_size;
- } else {
- enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
- auth_next_eng = cfg->enc_eng_id;
- }
- }
- if (cfg->enc_eng_id) {
- upd_info->flags |= SA_CMDL_UPD_ENC;
- upd_info->enc_size.index = enc_offset >> 2;
- upd_info->enc_offset.index = upd_info->enc_size.index + 1;
- /* Encryption command label */
- cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;
- /* Encryption modes requiring IV */
- if (cfg->iv_size) {
- upd_info->flags |= SA_CMDL_UPD_ENC_IV;
- upd_info->enc_iv.index =
- (enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
- upd_info->enc_iv.size = cfg->iv_size;
- cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
- SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
- cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
- (SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
- total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
- } else {
- cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
- SA_CMDL_HEADER_SIZE_BYTES;
- total += SA_CMDL_HEADER_SIZE_BYTES;
- }
- }
- if (cfg->auth_eng_id) {
- upd_info->flags |= SA_CMDL_UPD_AUTH;
- upd_info->auth_size.index = auth_offset >> 2;
- upd_info->auth_offset.index = upd_info->auth_size.index + 1;
- cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
- cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
- SA_CMDL_HEADER_SIZE_BYTES;
- total += SA_CMDL_HEADER_SIZE_BYTES;
- }
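- /* Pad the label to an 8-byte boundary and byte-swap each word for the engine */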
- total = roundup(total, 8);
- for (i = 0; i < total / 4; i++)
- word_ptr[i] = swab32(word_ptr[i]);
- return total;
- }
- /* Update Command label */
- static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
- struct sa_cmdl_upd_info *upd_info)
- {
- int i = 0, j;
- if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
- cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
- cmdl[upd_info->enc_size.index] |= req->enc_size;
- cmdl[upd_info->enc_offset.index] &=
- ~SA_CMDL_SOP_BYPASS_LEN_MASK;
- cmdl[upd_info->enc_offset.index] |=
- FIELD_PREP(SA_CMDL_SOP_BYPASS_LEN_MASK,
- req->enc_offset);
- if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
- __be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
- u32 *enc_iv = (u32 *)req->enc_iv;
- for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
- data[j] = cpu_to_be32(*enc_iv);
- enc_iv++;
- }
- }
- }
- if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
- cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
- cmdl[upd_info->auth_size.index] |= req->auth_size;
- cmdl[upd_info->auth_offset.index] &=
- ~SA_CMDL_SOP_BYPASS_LEN_MASK;
- cmdl[upd_info->auth_offset.index] |=
- FIELD_PREP(SA_CMDL_SOP_BYPASS_LEN_MASK,
- req->auth_offset);
- if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
- sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
- req->auth_iv,
- (upd_info->auth_iv.size > 8));
- }
- if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
- int offset = (req->auth_size & 0xF) ? 4 : 0;
- memcpy(&cmdl[upd_info->aux_key_info.index],
- &upd_info->aux_key[offset], 16);
- }
- }
- }
- /* Format SWINFO words to be sent to SA */
- static
- void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
- u8 cmdl_present, u8 cmdl_offset, u8 flags,
- u8 hash_size, u32 *swinfo)
- {
- swinfo[0] = sc_id;
- swinfo[0] |= FIELD_PREP(SA_SW0_FLAGS_MASK, flags);
- if (likely(cmdl_present))
- swinfo[0] |= FIELD_PREP(SA_SW0_CMDL_INFO_MASK,
- cmdl_offset | SA_SW0_CMDL_PRESENT);
- swinfo[0] |= FIELD_PREP(SA_SW0_ENG_ID_MASK, eng_id);
- swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
- swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
- swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
- swinfo[2] |= FIELD_PREP(SA_SW2_EGRESS_LENGTH, hash_size);
- }
- /* Dump the security context */
- static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
- {
- #ifdef DEBUG
- dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
- print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
- 16, 1, buf, SA_CTX_MAX_SZ, false);
- #endif
- }
- static
- int sa_init_sc(struct sa_ctx_info *ctx, const struct sa_match_data *match_data,
- const u8 *enc_key, u16 enc_key_sz,
- const u8 *auth_key, u16 auth_key_sz,
- struct algo_data *ad, u8 enc, u32 *swinfo)
- {
- int enc_sc_offset = 0;
- int auth_sc_offset = 0;
- u8 *sc_buf = ctx->sc;
- u16 sc_id = ctx->sc_id;
- u8 first_engine = 0;
- memzero_explicit(sc_buf, SA_CTX_MAX_SZ);
- if (ad->auth_eng.eng_id) {
- if (enc)
- first_engine = ad->enc_eng.eng_id;
- else
- first_engine = ad->auth_eng.eng_id;
- enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
- auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
- sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
- if (!ad->hash_size)
- return -EINVAL;
- ad->hash_size = roundup(ad->hash_size, 8);
- } else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
- enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
- first_engine = ad->enc_eng.eng_id;
- sc_buf[1] = SA_SCCTL_FE_ENC;
- ad->hash_size = ad->iv_out_size;
- }
- /* SCCTL Owner info: 0=host, 1=CP_ACE */
- sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
- memcpy(&sc_buf[2], &sc_id, 2);
- sc_buf[4] = 0x0;
- sc_buf[5] = match_data->priv_id;
- sc_buf[6] = match_data->priv;
- sc_buf[7] = 0x0;
- /* Prepare context for encryption engine */
- if (ad->enc_eng.sc_size) {
- if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
- &sc_buf[enc_sc_offset]))
- return -EINVAL;
- }
- /* Prepare context for authentication engine */
- if (ad->auth_eng.sc_size)
- sa_set_sc_auth(ad, auth_key, auth_key_sz,
- &sc_buf[auth_sc_offset]);
- /* Set the ownership of context to CP_ACE */
- sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;
- /* swizzle the security context */
- sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);
- sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
- SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);
- sa_dump_sc(sc_buf, ctx->sc_phys);
- return 0;
- }
- /* Free the per direction context memory */
- static void sa_free_ctx_info(struct sa_ctx_info *ctx,
- struct sa_crypto_data *data)
- {
- unsigned long bn;
- bn = ctx->sc_id - data->sc_id_start;
- spin_lock(&data->scid_lock);
- __clear_bit(bn, data->ctx_bm);
- data->sc_id--;
- spin_unlock(&data->scid_lock);
- if (ctx->sc) {
- dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
- ctx->sc = NULL;
- }
- }
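- /* Allocate a security context ID and DMA pool memory for one direction */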
- static int sa_init_ctx_info(struct sa_ctx_info *ctx,
- struct sa_crypto_data *data)
- {
- unsigned long bn;
- int err;
- spin_lock(&data->scid_lock);
- bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
- __set_bit(bn, data->ctx_bm);
- data->sc_id++;
- spin_unlock(&data->scid_lock);
- ctx->sc_id = (u16)(data->sc_id_start + bn);
- ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
- if (!ctx->sc) {
- dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
- err = -ENOMEM;
- goto scid_rollback;
- }
- return 0;
- scid_rollback:
- spin_lock(&data->scid_lock);
- __clear_bit(bn, data->ctx_bm);
- data->sc_id--;
- spin_unlock(&data->scid_lock);
- return err;
- }
- static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
- {
- struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
- __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
- ctx->dec.sc_id, &ctx->dec.sc_phys);
- sa_free_ctx_info(&ctx->enc, data);
- sa_free_ctx_info(&ctx->dec, data);
- crypto_free_skcipher(ctx->fallback.skcipher);
- }
- static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
- {
- struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- const char *name = crypto_tfm_alg_name(&tfm->base);
- struct crypto_skcipher *child;
- int ret;
- memzero_explicit(ctx, sizeof(*ctx));
- ctx->dev_data = data;
- ret = sa_init_ctx_info(&ctx->enc, data);
- if (ret)
- return ret;
- ret = sa_init_ctx_info(&ctx->dec, data);
- if (ret) {
- sa_free_ctx_info(&ctx->enc, data);
- return ret;
- }
- child = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(child)) {
- dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
- return PTR_ERR(child);
- }
- ctx->fallback.skcipher = child;
- crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
- sizeof(struct skcipher_request));
- dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
- __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
- ctx->dec.sc_id, &ctx->dec.sc_phys);
- return 0;
- }
- static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keylen, struct algo_data *ad)
- {
- struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
- struct crypto_skcipher *child = ctx->fallback.skcipher;
- int cmdl_len;
- struct sa_cmdl_cfg cfg;
- int ret;
- if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
- keylen != AES_KEYSIZE_256)
- return -EINVAL;
- ad->enc_eng.eng_id = SA_ENG_ID_EM1;
- ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
- memzero_explicit(&cfg, sizeof(cfg));
- cfg.enc_eng_id = ad->enc_eng.eng_id;
- cfg.iv_size = crypto_skcipher_ivsize(tfm);
- crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
- crypto_skcipher_set_flags(child, tfm->base.crt_flags &
- CRYPTO_TFM_REQ_MASK);
- ret = crypto_skcipher_setkey(child, key, keylen);
- if (ret)
- return ret;
- /* Setup Encryption Security Context & Command label template */
- if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, key, keylen, NULL, 0,
- ad, 1, &ctx->enc.epib[1]))
- goto badkey;
- cmdl_len = sa_format_cmdl_gen(&cfg,
- (u8 *)ctx->enc.cmdl,
- &ctx->enc.cmdl_upd_info);
- if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
- goto badkey;
- ctx->enc.cmdl_size = cmdl_len;
- /* Setup Decryption Security Context & Command label template */
- if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, key, keylen, NULL, 0,
- ad, 0, &ctx->dec.epib[1]))
- goto badkey;
- cfg.enc_eng_id = ad->enc_eng.eng_id;
- cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
- &ctx->dec.cmdl_upd_info);
- if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
- goto badkey;
- ctx->dec.cmdl_size = cmdl_len;
- ctx->iv_idx = ad->iv_idx;
- return 0;
- badkey:
- dev_err(sa_k3_dev, "%s: badkey\n", __func__);
- return -EINVAL;
- }
- static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- /* Convert the key size (16/24/32) to the key size index (0/1/2) */
- int key_idx = (keylen >> 3) - 2;
- if (key_idx >= 3)
- return -EINVAL;
- ad.mci_enc = mci_cbc_enc_array[key_idx];
- ad.mci_dec = mci_cbc_dec_array[key_idx];
- ad.inv_key = true;
- ad.ealg_id = SA_EALG_ID_AES_CBC;
- ad.iv_idx = 4;
- ad.iv_out_size = 16;
- return sa_cipher_setkey(tfm, key, keylen, &ad);
- }
- static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- /* Convert the key size (16/24/32) to the key size index (0/1/2) */
- int key_idx = (keylen >> 3) - 2;
- if (key_idx >= 3)
- return -EINVAL;
- ad.mci_enc = mci_ecb_enc_array[key_idx];
- ad.mci_dec = mci_ecb_dec_array[key_idx];
- ad.inv_key = true;
- ad.ealg_id = SA_EALG_ID_AES_ECB;
- return sa_cipher_setkey(tfm, key, keylen, &ad);
- }
- static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- ad.mci_enc = mci_cbc_3des_enc_array;
- ad.mci_dec = mci_cbc_3des_dec_array;
- ad.ealg_id = SA_EALG_ID_3DES_CBC;
- ad.iv_idx = 6;
- ad.iv_out_size = 8;
- return sa_cipher_setkey(tfm, key, keylen, &ad);
- }
- static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- ad.mci_enc = mci_ecb_3des_enc_array;
- ad.mci_dec = mci_ecb_3des_dec_array;
- return sa_cipher_setkey(tfm, key, keylen, &ad);
- }
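- /* Sync the device-written scatterlist for CPU access after DMA completion */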
- static void sa_sync_from_device(struct sa_rx_data *rxd)
- {
- struct sg_table *sgt;
- if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
- sgt = &rxd->mapped_sg[0].sgt;
- else
- sgt = &rxd->mapped_sg[1].sgt;
- dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
- }
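- /* Unmap and free the scatterlist resources held by a completed request */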
- static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
- {
- int i;
- for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
- struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];
- if (mapped_sg->mapped) {
- dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
- mapped_sg->dir, 0);
- kfree(mapped_sg->split_sg);
- }
- }
- kfree(rxd);
- }
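- /*
- * DMA completion callback for skcipher requests: copy the output IV from
- * the descriptor metadata back into the request and complete it.
- */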
- static void sa_aes_dma_in_callback(void *data)
- {
- struct sa_rx_data *rxd = (struct sa_rx_data *)data;
- struct skcipher_request *req;
- u32 *result;
- __be32 *mdptr;
- size_t ml, pl;
- int i;
- sa_sync_from_device(rxd);
- req = container_of(rxd->req, struct skcipher_request, base);
- if (req->iv) {
- mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl,
- &ml);
- result = (u32 *)req->iv;
- for (i = 0; i < (rxd->enc_iv_size / 4); i++)
- result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
- }
- sa_free_sa_rx_data(rxd);
- skcipher_request_complete(req, 0);
- }
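- /*
- * Fill the TX descriptor metadata: EPIB words first, then the packet-info
- * word, then the PS data (command label words).
- */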
- static void
- sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
- {
- u32 *out, *in;
- int i;
- for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
- *out++ = *in++;
- mdptr[4] = (0xFFFF << 16);
- for (out = &mdptr[5], in = psdata, i = 0;
- i < pslen / sizeof(u32); i++)
- *out++ = *in++;
- }
- static int sa_run(struct sa_req *req)
- {
- struct sa_rx_data *rxd;
- gfp_t gfp_flags;
- u32 cmdl[SA_MAX_CMDL_WORDS];
- struct sa_crypto_data *pdata = dev_get_drvdata(sa_k3_dev);
- struct device *ddev;
- struct dma_chan *dma_rx;
- int sg_nents, src_nents, dst_nents;
- struct scatterlist *src, *dst;
- size_t pl, ml, split_size;
- struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
- int ret;
- struct dma_async_tx_descriptor *tx_out;
- u32 *mdptr;
- bool diff_dst;
- enum dma_data_direction dir_src;
- struct sa_mapped_sg *mapped_sg;
- gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
- GFP_KERNEL : GFP_ATOMIC;
- rxd = kzalloc(sizeof(*rxd), gfp_flags);
- if (!rxd)
- return -ENOMEM;
- if (req->src != req->dst) {
- diff_dst = true;
- dir_src = DMA_TO_DEVICE;
- } else {
- diff_dst = false;
- dir_src = DMA_BIDIRECTIONAL;
- }
- /*
- * SA2UL has an interesting feature where the receive DMA channel
- * is selected based on the amount of data passed to the engine. Within the
- * transition range, there is also a space where it is impossible
- * to determine where the data will end up, and this should be
- * avoided. This will be handled by the SW fallback mechanism by
- * the individual algorithm implementations.
- */
- if (req->size >= 256)
- dma_rx = pdata->dma_rx2;
- else
- dma_rx = pdata->dma_rx1;
- ddev = dmaengine_get_dma_device(pdata->dma_tx);
- rxd->ddev = ddev;
- memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);
- sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);
- if (req->type != CRYPTO_ALG_TYPE_AHASH) {
- if (req->enc)
- req->type |=
- (SA_REQ_SUBTYPE_ENC << SA_REQ_SUBTYPE_SHIFT);
- else
- req->type |=
- (SA_REQ_SUBTYPE_DEC << SA_REQ_SUBTYPE_SHIFT);
- }
- cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;
- /*
- * Map the packets, first we check if the data fits into a single
- * sg entry and use that if possible. If it does not fit, we check
- * if we need to do sg_split to align the scatterlist data on the
- * actual data size being processed by the crypto engine.
- */
- src = req->src;
- sg_nents = sg_nents_for_len(src, req->size);
- split_size = req->size;
- mapped_sg = &rxd->mapped_sg[0];
- if (sg_nents == 1 && split_size <= req->src->length) {
- src = &mapped_sg->static_sg;
- src_nents = 1;
- sg_init_table(src, 1);
- sg_set_page(src, sg_page(req->src), split_size,
- req->src->offset);
- mapped_sg->sgt.sgl = src;
- mapped_sg->sgt.orig_nents = src_nents;
- ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
- if (ret) {
- kfree(rxd);
- return ret;
- }
- mapped_sg->dir = dir_src;
- mapped_sg->mapped = true;
- } else {
- mapped_sg->sgt.sgl = req->src;
- mapped_sg->sgt.orig_nents = sg_nents;
- ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
- if (ret) {
- kfree(rxd);
- return ret;
- }
- mapped_sg->dir = dir_src;
- mapped_sg->mapped = true;
- ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
- &split_size, &src, &src_nents, gfp_flags);
- if (ret) {
- src_nents = mapped_sg->sgt.nents;
- src = mapped_sg->sgt.sgl;
- } else {
- mapped_sg->split_sg = src;
- }
- }
- dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);
- if (!diff_dst) {
- dst_nents = src_nents;
- dst = src;
- } else {
- dst_nents = sg_nents_for_len(req->dst, req->size);
- mapped_sg = &rxd->mapped_sg[1];
- if (dst_nents == 1 && split_size <= req->dst->length) {
- dst = &mapped_sg->static_sg;
- dst_nents = 1;
- sg_init_table(dst, 1);
- sg_set_page(dst, sg_page(req->dst), split_size,
- req->dst->offset);
- mapped_sg->sgt.sgl = dst;
- mapped_sg->sgt.orig_nents = dst_nents;
- ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
- DMA_FROM_DEVICE, 0);
- if (ret)
- goto err_cleanup;
- mapped_sg->dir = DMA_FROM_DEVICE;
- mapped_sg->mapped = true;
- } else {
- mapped_sg->sgt.sgl = req->dst;
- mapped_sg->sgt.orig_nents = dst_nents;
- ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
- DMA_FROM_DEVICE, 0);
- if (ret)
- goto err_cleanup;
- mapped_sg->dir = DMA_FROM_DEVICE;
- mapped_sg->mapped = true;
- ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
- 0, 1, &split_size, &dst, &dst_nents,
- gfp_flags);
- if (ret) {
- dst_nents = mapped_sg->sgt.nents;
- dst = mapped_sg->sgt.sgl;
- } else {
- mapped_sg->split_sg = dst;
- }
- }
- }
- rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
- DMA_DEV_TO_MEM,
- DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
- if (!rxd->tx_in) {
- dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
- ret = -EINVAL;
- goto err_cleanup;
- }
- rxd->req = (void *)req->base;
- rxd->enc = req->enc;
- rxd->iv_idx = req->ctx->iv_idx;
- rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
- rxd->tx_in->callback = req->callback;
- rxd->tx_in->callback_param = rxd;
- tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
- src_nents, DMA_MEM_TO_DEV,
- DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
- if (!tx_out) {
- dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
- ret = -EINVAL;
- goto err_cleanup;
- }
- /*
- * Prepare metadata for DMA engine. This essentially describes the
- * crypto algorithm to be used, data sizes, different keys etc.
- */
- mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(tx_out, &pl, &ml);
- sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
- sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
- sa_ctx->epib);
- ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
- dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);
- dmaengine_submit(tx_out);
- dmaengine_submit(rxd->tx_in);
- dma_async_issue_pending(dma_rx);
- dma_async_issue_pending(pdata->dma_tx);
- return -EINPROGRESS;
- err_cleanup:
- sa_free_sa_rx_data(rxd);
- return ret;
- }
- static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
- {
- struct sa_tfm_ctx *ctx =
- crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
- struct crypto_alg *alg = req->base.tfm->__crt_alg;
- struct sa_req sa_req = { 0 };
- if (!req->cryptlen)
- return 0;
- if (req->cryptlen % alg->cra_blocksize)
- return -EINVAL;
- /* Use SW fallback if the data size is not supported */
- if (req->cryptlen > SA_MAX_DATA_SZ ||
- (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
- req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
- struct skcipher_request *subreq = skcipher_request_ctx(req);
- skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);
- skcipher_request_set_callback(subreq, req->base.flags,
- req->base.complete,
- req->base.data);
- skcipher_request_set_crypt(subreq, req->src, req->dst,
- req->cryptlen, req->iv);
- if (enc)
- return crypto_skcipher_encrypt(subreq);
- else
- return crypto_skcipher_decrypt(subreq);
- }
- sa_req.size = req->cryptlen;
- sa_req.enc_size = req->cryptlen;
- sa_req.src = req->src;
- sa_req.dst = req->dst;
- sa_req.enc_iv = iv;
- sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
- sa_req.enc = enc;
- sa_req.callback = sa_aes_dma_in_callback;
- sa_req.mdata_size = 44;
- sa_req.base = &req->base;
- sa_req.ctx = ctx;
- return sa_run(&sa_req);
- }
- static int sa_encrypt(struct skcipher_request *req)
- {
- return sa_cipher_run(req, req->iv, 1);
- }
- static int sa_decrypt(struct skcipher_request *req)
- {
- return sa_cipher_run(req, req->iv, 0);
- }
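- /*
- * DMA completion callback for ahash requests: copy the digest from the
- * descriptor metadata into req->result and complete the request.
- */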
- static void sa_sha_dma_in_callback(void *data)
- {
- struct sa_rx_data *rxd = (struct sa_rx_data *)data;
- struct ahash_request *req;
- struct crypto_ahash *tfm;
- unsigned int authsize;
- int i;
- size_t ml, pl;
- u32 *result;
- __be32 *mdptr;
- sa_sync_from_device(rxd);
- req = container_of(rxd->req, struct ahash_request, base);
- tfm = crypto_ahash_reqtfm(req);
- authsize = crypto_ahash_digestsize(tfm);
- mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
- result = (u32 *)req->result;
- for (i = 0; i < (authsize / 4); i++)
- result[i] = be32_to_cpu(mdptr[i + 4]);
- sa_free_sa_rx_data(rxd);
- ahash_request_complete(req, 0);
- }
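- /* Return the precomputed hash of an empty message without using the engine */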
- static int zero_message_process(struct ahash_request *req)
- {
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- int sa_digest_size = crypto_ahash_digestsize(tfm);
- switch (sa_digest_size) {
- case SHA1_DIGEST_SIZE:
- memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
- break;
- case SHA256_DIGEST_SIZE:
- memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
- break;
- case SHA512_DIGEST_SIZE:
- memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
- break;
- default:
- return -EINVAL;
- }
- return 0;
- }
- static int sa_sha_run(struct ahash_request *req)
- {
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct sa_req sa_req = { 0 };
- size_t auth_len;
- auth_len = req->nbytes;
- if (!auth_len)
- return zero_message_process(req);
- if (auth_len > SA_MAX_DATA_SZ ||
- (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
- auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
- struct ahash_request *subreq = &rctx->fallback_req;
- int ret = 0;
- ahash_request_set_tfm(subreq, ctx->fallback.ahash);
- subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- crypto_ahash_init(subreq);
- subreq->nbytes = auth_len;
- subreq->src = req->src;
- subreq->result = req->result;
- ret |= crypto_ahash_update(subreq);
- subreq->nbytes = 0;
- ret |= crypto_ahash_final(subreq);
- return ret;
- }
- sa_req.size = auth_len;
- sa_req.auth_size = auth_len;
- sa_req.src = req->src;
- sa_req.dst = req->src;
- sa_req.enc = true;
- sa_req.type = CRYPTO_ALG_TYPE_AHASH;
- sa_req.callback = sa_sha_dma_in_callback;
- sa_req.mdata_size = 28;
- sa_req.ctx = ctx;
- sa_req.base = &req->base;
- return sa_run(&sa_req);
- }
- static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct algo_data *ad)
- {
- int bs = crypto_shash_blocksize(ctx->shash);
- int cmdl_len;
- struct sa_cmdl_cfg cfg;
- ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
- ad->auth_eng.eng_id = SA_ENG_ID_AM1;
- ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
- memset(ctx->authkey, 0, bs);
- memset(&cfg, 0, sizeof(cfg));
- cfg.aalg = ad->aalg_id;
- cfg.enc_eng_id = ad->enc_eng.eng_id;
- cfg.auth_eng_id = ad->auth_eng.eng_id;
- cfg.iv_size = 0;
- cfg.akey = NULL;
- cfg.akey_len = 0;
- ctx->dev_data = dev_get_drvdata(sa_k3_dev);
- /* Setup Encryption Security Context & Command label template */
- if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, NULL, 0, NULL, 0,
- ad, 0, &ctx->enc.epib[1]))
- goto badkey;
- cmdl_len = sa_format_cmdl_gen(&cfg,
- (u8 *)ctx->enc.cmdl,
- &ctx->enc.cmdl_upd_info);
- if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
- goto badkey;
- ctx->enc.cmdl_size = cmdl_len;
- return 0;
- badkey:
- dev_err(sa_k3_dev, "%s: badkey\n", __func__);
- return -EINVAL;
- }
- static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
- {
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- int ret;
- memset(ctx, 0, sizeof(*ctx));
- ctx->dev_data = data;
- ret = sa_init_ctx_info(&ctx->enc, data);
- if (ret)
- return ret;
- if (alg_base) {
- ctx->shash = crypto_alloc_shash(alg_base, 0,
- CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(ctx->shash)) {
- dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
- alg_base);
- return PTR_ERR(ctx->shash);
- }
- /* for fallback */
- ctx->fallback.ahash =
- crypto_alloc_ahash(alg_base, 0,
- CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(ctx->fallback.ahash)) {
- dev_err(ctx->dev_data->dev,
- "Could not load fallback driver\n");
- return PTR_ERR(ctx->fallback.ahash);
- }
- }
- dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
- __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
- ctx->dec.sc_id, &ctx->dec.sc_phys);
- crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
- sizeof(struct sa_sha_req_ctx) +
- crypto_ahash_reqsize(ctx->fallback.ahash));
- return 0;
- }
- static int sa_sha_digest(struct ahash_request *req)
- {
- return sa_sha_run(req);
- }
- static int sa_sha_init(struct ahash_request *req)
- {
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
- crypto_ahash_digestsize(tfm), rctx);
- ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
- rctx->fallback_req.base.flags =
- req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- return crypto_ahash_init(&rctx->fallback_req);
- }
- static int sa_sha_update(struct ahash_request *req)
- {
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
- rctx->fallback_req.base.flags =
- req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- rctx->fallback_req.nbytes = req->nbytes;
- rctx->fallback_req.src = req->src;
- return crypto_ahash_update(&rctx->fallback_req);
- }
- static int sa_sha_final(struct ahash_request *req)
- {
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
- rctx->fallback_req.base.flags =
- req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- rctx->fallback_req.result = req->result;
- return crypto_ahash_final(&rctx->fallback_req);
- }
- static int sa_sha_finup(struct ahash_request *req)
- {
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
- rctx->fallback_req.base.flags =
- req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- rctx->fallback_req.nbytes = req->nbytes;
- rctx->fallback_req.src = req->src;
- rctx->fallback_req.result = req->result;
- return crypto_ahash_finup(&rctx->fallback_req);
- }
- static int sa_sha_import(struct ahash_request *req, const void *in)
- {
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
- rctx->fallback_req.base.flags = req->base.flags &
- CRYPTO_TFM_REQ_MAY_SLEEP;
- return crypto_ahash_import(&rctx->fallback_req, in);
- }
- static int sa_sha_export(struct ahash_request *req, void *out)
- {
- struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
- struct ahash_request *subreq = &rctx->fallback_req;
- ahash_request_set_tfm(subreq, ctx->fallback.ahash);
- subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- return crypto_ahash_export(subreq, out);
- }
- static int sa_sha1_cra_init(struct crypto_tfm *tfm)
- {
- struct algo_data ad = { 0 };
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
- sa_sha_cra_init_alg(tfm, "sha1");
- ad.aalg_id = SA_AALG_ID_SHA1;
- ad.hash_size = SHA1_DIGEST_SIZE;
- ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
- sa_sha_setup(ctx, &ad);
- return 0;
- }
- static int sa_sha256_cra_init(struct crypto_tfm *tfm)
- {
- struct algo_data ad = { 0 };
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
- sa_sha_cra_init_alg(tfm, "sha256");
- ad.aalg_id = SA_AALG_ID_SHA2_256;
- ad.hash_size = SHA256_DIGEST_SIZE;
- ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
- sa_sha_setup(ctx, &ad);
- return 0;
- }
- static int sa_sha512_cra_init(struct crypto_tfm *tfm)
- {
- struct algo_data ad = { 0 };
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
- sa_sha_cra_init_alg(tfm, "sha512");
- ad.aalg_id = SA_AALG_ID_SHA2_512;
- ad.hash_size = SHA512_DIGEST_SIZE;
- ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;
- sa_sha_setup(ctx, &ad);
- return 0;
- }
- static void sa_sha_cra_exit(struct crypto_tfm *tfm)
- {
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
- __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
- ctx->dec.sc_id, &ctx->dec.sc_phys);
- if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
- sa_free_ctx_info(&ctx->enc, data);
- crypto_free_shash(ctx->shash);
- crypto_free_ahash(ctx->fallback.ahash);
- }
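- /*
- * DMA completion callback for AEAD requests: append the computed tag on
- * encryption, or compare it against the received tag on decryption.
- */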
- static void sa_aead_dma_in_callback(void *data)
- {
- struct sa_rx_data *rxd = (struct sa_rx_data *)data;
- struct aead_request *req;
- struct crypto_aead *tfm;
- unsigned int start;
- unsigned int authsize;
- u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
- size_t pl, ml;
- int i;
- int err = 0;
- u32 *mdptr;
- sa_sync_from_device(rxd);
- req = container_of(rxd->req, struct aead_request, base);
- tfm = crypto_aead_reqtfm(req);
- start = req->assoclen + req->cryptlen;
- authsize = crypto_aead_authsize(tfm);
- mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
- for (i = 0; i < (authsize / 4); i++)
- mdptr[i + 4] = swab32(mdptr[i + 4]);
- if (rxd->enc) {
- scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
- 1);
- } else {
- start -= authsize;
- scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
- 0);
- err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
- }
- sa_free_sa_rx_data(rxd);
- aead_request_complete(req, err);
- }
- static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
- const char *fallback)
- {
- struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- int ret;
- memzero_explicit(ctx, sizeof(*ctx));
- ctx->dev_data = data;
- ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(ctx->shash)) {
- dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
- return PTR_ERR(ctx->shash);
- }
- ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
- CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(ctx->fallback.aead)) {
- dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
- fallback);
- return PTR_ERR(ctx->fallback.aead);
- }
- crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
- crypto_aead_reqsize(ctx->fallback.aead));
- ret = sa_init_ctx_info(&ctx->enc, data);
- if (ret)
- return ret;
- ret = sa_init_ctx_info(&ctx->dec, data);
- if (ret) {
- sa_free_ctx_info(&ctx->enc, data);
- return ret;
- }
- dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
- __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
- ctx->dec.sc_id, &ctx->dec.sc_phys);
- return ret;
- }
- static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
- {
- return sa_cra_init_aead(tfm, "sha1",
- "authenc(hmac(sha1-ce),cbc(aes-ce))");
- }
- static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
- {
- return sa_cra_init_aead(tfm, "sha256",
- "authenc(hmac(sha256-ce),cbc(aes-ce))");
- }
- static void sa_exit_tfm_aead(struct crypto_aead *tfm)
- {
- struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
- struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
- crypto_free_shash(ctx->shash);
- crypto_free_aead(ctx->fallback.aead);
- sa_free_ctx_info(&ctx->enc, data);
- sa_free_ctx_info(&ctx->dec, data);
- }
- /* AEAD algorithm configuration interface function */
- static int sa_aead_setkey(struct crypto_aead *authenc,
- const u8 *key, unsigned int keylen,
- struct algo_data *ad)
- {
- struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
- struct crypto_authenc_keys keys;
- int cmdl_len;
- struct sa_cmdl_cfg cfg;
- int key_idx;
- if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
- return -EINVAL;
- /* Convert the key size (16/24/32) to the key size index (0/1/2) */
- key_idx = (keys.enckeylen >> 3) - 2;
- if (key_idx >= 3)
- return -EINVAL;
- ad->ctx = ctx;
- ad->enc_eng.eng_id = SA_ENG_ID_EM1;
- ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
- ad->auth_eng.eng_id = SA_ENG_ID_AM1;
- ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
- ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
- ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
- ad->inv_key = true;
- ad->keyed_mac = true;
- ad->ealg_id = SA_EALG_ID_AES_CBC;
- ad->prep_iopad = sa_prepare_iopads;
- memset(&cfg, 0, sizeof(cfg));
- cfg.enc = true;
- cfg.aalg = ad->aalg_id;
- cfg.enc_eng_id = ad->enc_eng.eng_id;
- cfg.auth_eng_id = ad->auth_eng.eng_id;
- cfg.iv_size = crypto_aead_ivsize(authenc);
- cfg.akey = keys.authkey;
- cfg.akey_len = keys.authkeylen;
- /* Setup Encryption Security Context & Command label template */
- if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, keys.enckey,
- keys.enckeylen, keys.authkey, keys.authkeylen,
- ad, 1, &ctx->enc.epib[1]))
- return -EINVAL;
- cmdl_len = sa_format_cmdl_gen(&cfg,
- (u8 *)ctx->enc.cmdl,
- &ctx->enc.cmdl_upd_info);
- if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
- return -EINVAL;
- ctx->enc.cmdl_size = cmdl_len;
- /* Setup Decryption Security Context & Command label template */
- if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, keys.enckey,
- keys.enckeylen, keys.authkey, keys.authkeylen,
- ad, 0, &ctx->dec.epib[1]))
- return -EINVAL;
- cfg.enc = false;
- cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
- &ctx->dec.cmdl_upd_info);
- if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
- return -EINVAL;
- ctx->dec.cmdl_size = cmdl_len;
- crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
- crypto_aead_set_flags(ctx->fallback.aead,
- crypto_aead_get_flags(authenc) &
- CRYPTO_TFM_REQ_MASK);
- crypto_aead_setkey(ctx->fallback.aead, key, keylen);
- return 0;
- }
- static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
- {
- struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
- return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
- }
- static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
- const u8 *key, unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- ad.ealg_id = SA_EALG_ID_AES_CBC;
- ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
- ad.hash_size = SHA1_DIGEST_SIZE;
- ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
- return sa_aead_setkey(authenc, key, keylen, &ad);
- }
- static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
- const u8 *key, unsigned int keylen)
- {
- struct algo_data ad = { 0 };
- ad.ealg_id = SA_EALG_ID_AES_CBC;
- ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
- ad.hash_size = SHA256_DIGEST_SIZE;
- ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
- return sa_aead_setkey(authenc, key, keylen, &ad);
- }
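- /*
- * Common AEAD request path: request sizes the hardware cannot handle
- * (too large, or inside the unsafe range) are redirected to the software
- * fallback; everything else is packed into a struct sa_req and submitted
- * through sa_run().
- */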
- static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
- {
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
- struct sa_req sa_req = { 0 };
- size_t auth_size, enc_size;
- enc_size = req->cryptlen;
- auth_size = req->assoclen + req->cryptlen;
- if (!enc) {
- enc_size -= crypto_aead_authsize(tfm);
- auth_size -= crypto_aead_authsize(tfm);
- }
- if (auth_size > SA_MAX_DATA_SZ ||
- (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
- auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
- struct aead_request *subreq = aead_request_ctx(req);
- int ret;
- aead_request_set_tfm(subreq, ctx->fallback.aead);
- aead_request_set_callback(subreq, req->base.flags,
- req->base.complete, req->base.data);
- aead_request_set_crypt(subreq, req->src, req->dst,
- req->cryptlen, req->iv);
- aead_request_set_ad(subreq, req->assoclen);
- ret = enc ? crypto_aead_encrypt(subreq) :
- crypto_aead_decrypt(subreq);
- return ret;
- }
- sa_req.enc_offset = req->assoclen;
- sa_req.enc_size = enc_size;
- sa_req.auth_size = auth_size;
- sa_req.size = auth_size;
- sa_req.enc_iv = iv;
- sa_req.type = CRYPTO_ALG_TYPE_AEAD;
- sa_req.enc = enc;
- sa_req.callback = sa_aead_dma_in_callback;
- sa_req.mdata_size = 52;
- sa_req.base = &req->base;
- sa_req.ctx = ctx;
- sa_req.src = req->src;
- sa_req.dst = req->dst;
- return sa_run(&sa_req);
- }
- /* AEAD algorithm encrypt interface function */
- static int sa_aead_encrypt(struct aead_request *req)
- {
- return sa_aead_run(req, req->iv, 1);
- }
- /* AEAD algorithm decrypt interface function */
- static int sa_aead_decrypt(struct aead_request *req)
- {
- return sa_aead_run(req, req->iv, 0);
- }
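- /*
- * Algorithm templates exposed to the crypto API; an entry is registered
- * only if the corresponding bit is set in the SoC's supported_algos mask.
- */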
- static struct sa_alg_tmpl sa_algs[] = {
- [SA_ALG_CBC_AES] = {
- .type = CRYPTO_ALG_TYPE_SKCIPHER,
- .alg.skcipher = {
- .base.cra_name = "cbc(aes)",
- .base.cra_driver_name = "cbc-aes-sa2ul",
- .base.cra_priority = 30000,
- .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .base.cra_blocksize = AES_BLOCK_SIZE,
- .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .base.cra_module = THIS_MODULE,
- .init = sa_cipher_cra_init,
- .exit = sa_cipher_cra_exit,
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = sa_aes_cbc_setkey,
- .encrypt = sa_encrypt,
- .decrypt = sa_decrypt,
- }
- },
- [SA_ALG_EBC_AES] = {
- .type = CRYPTO_ALG_TYPE_SKCIPHER,
- .alg.skcipher = {
- .base.cra_name = "ecb(aes)",
- .base.cra_driver_name = "ecb-aes-sa2ul",
- .base.cra_priority = 30000,
- .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .base.cra_blocksize = AES_BLOCK_SIZE,
- .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .base.cra_module = THIS_MODULE,
- .init = sa_cipher_cra_init,
- .exit = sa_cipher_cra_exit,
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .setkey = sa_aes_ecb_setkey,
- .encrypt = sa_encrypt,
- .decrypt = sa_decrypt,
- }
- },
- [SA_ALG_CBC_DES3] = {
- .type = CRYPTO_ALG_TYPE_SKCIPHER,
- .alg.skcipher = {
- .base.cra_name = "cbc(des3_ede)",
- .base.cra_driver_name = "cbc-des3-sa2ul",
- .base.cra_priority = 30000,
- .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .base.cra_blocksize = DES_BLOCK_SIZE,
- .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .base.cra_module = THIS_MODULE,
- .init = sa_cipher_cra_init,
- .exit = sa_cipher_cra_exit,
- .min_keysize = 3 * DES_KEY_SIZE,
- .max_keysize = 3 * DES_KEY_SIZE,
- .ivsize = DES_BLOCK_SIZE,
- .setkey = sa_3des_cbc_setkey,
- .encrypt = sa_encrypt,
- .decrypt = sa_decrypt,
- }
- },
- [SA_ALG_ECB_DES3] = {
- .type = CRYPTO_ALG_TYPE_SKCIPHER,
- .alg.skcipher = {
- .base.cra_name = "ecb(des3_ede)",
- .base.cra_driver_name = "ecb-des3-sa2ul",
- .base.cra_priority = 30000,
- .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .base.cra_blocksize = DES_BLOCK_SIZE,
- .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .base.cra_module = THIS_MODULE,
- .init = sa_cipher_cra_init,
- .exit = sa_cipher_cra_exit,
- .min_keysize = 3 * DES_KEY_SIZE,
- .max_keysize = 3 * DES_KEY_SIZE,
- .setkey = sa_3des_ecb_setkey,
- .encrypt = sa_encrypt,
- .decrypt = sa_decrypt,
- }
- },
- [SA_ALG_SHA1] = {
- .type = CRYPTO_ALG_TYPE_AHASH,
- .alg.ahash = {
- .halg.base = {
- .cra_name = "sha1",
- .cra_driver_name = "sha1-sa2ul",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_AHASH |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_NEED_FALLBACK,
- .cra_blocksize = SHA1_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .cra_module = THIS_MODULE,
- .cra_init = sa_sha1_cra_init,
- .cra_exit = sa_sha_cra_exit,
- },
- .halg.digestsize = SHA1_DIGEST_SIZE,
- .halg.statesize = sizeof(struct sa_sha_req_ctx) +
- sizeof(struct sha1_state),
- .init = sa_sha_init,
- .update = sa_sha_update,
- .final = sa_sha_final,
- .finup = sa_sha_finup,
- .digest = sa_sha_digest,
- .export = sa_sha_export,
- .import = sa_sha_import,
- },
- },
- [SA_ALG_SHA256] = {
- .type = CRYPTO_ALG_TYPE_AHASH,
- .alg.ahash = {
- .halg.base = {
- .cra_name = "sha256",
- .cra_driver_name = "sha256-sa2ul",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_AHASH |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_NEED_FALLBACK,
- .cra_blocksize = SHA256_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .cra_module = THIS_MODULE,
- .cra_init = sa_sha256_cra_init,
- .cra_exit = sa_sha_cra_exit,
- },
- .halg.digestsize = SHA256_DIGEST_SIZE,
- .halg.statesize = sizeof(struct sa_sha_req_ctx) +
- sizeof(struct sha256_state),
- .init = sa_sha_init,
- .update = sa_sha_update,
- .final = sa_sha_final,
- .finup = sa_sha_finup,
- .digest = sa_sha_digest,
- .export = sa_sha_export,
- .import = sa_sha_import,
- },
- },
- [SA_ALG_SHA512] = {
- .type = CRYPTO_ALG_TYPE_AHASH,
- .alg.ahash = {
- .halg.base = {
- .cra_name = "sha512",
- .cra_driver_name = "sha512-sa2ul",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_AHASH |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_NEED_FALLBACK,
- .cra_blocksize = SHA512_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .cra_module = THIS_MODULE,
- .cra_init = sa_sha512_cra_init,
- .cra_exit = sa_sha_cra_exit,
- },
- .halg.digestsize = SHA512_DIGEST_SIZE,
- .halg.statesize = sizeof(struct sa_sha_req_ctx) +
- sizeof(struct sha512_state),
- .init = sa_sha_init,
- .update = sa_sha_update,
- .final = sa_sha_final,
- .finup = sa_sha_finup,
- .digest = sa_sha_digest,
- .export = sa_sha_export,
- .import = sa_sha_import,
- },
- },
- [SA_ALG_AUTHENC_SHA1_AES] = {
- .type = CRYPTO_ALG_TYPE_AEAD,
- .alg.aead = {
- .base = {
- .cra_name = "authenc(hmac(sha1),cbc(aes))",
- .cra_driver_name =
- "authenc(hmac(sha1),cbc(aes))-sa2ul",
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_flags = CRYPTO_ALG_TYPE_AEAD |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .cra_module = THIS_MODULE,
- .cra_priority = 3000,
- },
- .ivsize = AES_BLOCK_SIZE,
- .maxauthsize = SHA1_DIGEST_SIZE,
- .init = sa_cra_init_aead_sha1,
- .exit = sa_exit_tfm_aead,
- .setkey = sa_aead_cbc_sha1_setkey,
- .setauthsize = sa_aead_setauthsize,
- .encrypt = sa_aead_encrypt,
- .decrypt = sa_aead_decrypt,
- },
- },
- [SA_ALG_AUTHENC_SHA256_AES] = {
- .type = CRYPTO_ALG_TYPE_AEAD,
- .alg.aead = {
- .base = {
- .cra_name = "authenc(hmac(sha256),cbc(aes))",
- .cra_driver_name =
- "authenc(hmac(sha256),cbc(aes))-sa2ul",
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_flags = CRYPTO_ALG_TYPE_AEAD |
- CRYPTO_ALG_KERN_DRIVER_ONLY |
- CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_NEED_FALLBACK,
- .cra_ctxsize = sizeof(struct sa_tfm_ctx),
- .cra_module = THIS_MODULE,
- .cra_alignmask = 0,
- .cra_priority = 3000,
- },
- .ivsize = AES_BLOCK_SIZE,
- .maxauthsize = SHA256_DIGEST_SIZE,
- .init = sa_cra_init_aead_sha256,
- .exit = sa_exit_tfm_aead,
- .setkey = sa_aead_cbc_sha256_setkey,
- .setauthsize = sa_aead_setauthsize,
- .encrypt = sa_aead_encrypt,
- .decrypt = sa_aead_decrypt,
- },
- },
- };
- /* Register the algorithms in crypto framework */
- static void sa_register_algos(struct sa_crypto_data *dev_data)
- {
- const struct sa_match_data *match_data = dev_data->match_data;
- struct device *dev = dev_data->dev;
- char *alg_name;
- u32 type;
- int i, err;
- for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
- /* Skip unsupported algos */
- if (!(match_data->supported_algos & BIT(i)))
- continue;
- type = sa_algs[i].type;
- if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
- alg_name = sa_algs[i].alg.skcipher.base.cra_name;
- err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
- } else if (type == CRYPTO_ALG_TYPE_AHASH) {
- alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
- err = crypto_register_ahash(&sa_algs[i].alg.ahash);
- } else if (type == CRYPTO_ALG_TYPE_AEAD) {
- alg_name = sa_algs[i].alg.aead.base.cra_name;
- err = crypto_register_aead(&sa_algs[i].alg.aead);
- } else {
- dev_err(dev,
- "unsupported crypto algorithm (%d)\n",
- sa_algs[i].type);
- continue;
- }
- if (err)
- dev_err(dev, "Failed to register '%s'\n", alg_name);
- else
- sa_algs[i].registered = true;
- }
- }
- /* Unregister the algorithms in crypto framework */
- static void sa_unregister_algos(const struct device *dev)
- {
- u32 type;
- int i;
- for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
- type = sa_algs[i].type;
- if (!sa_algs[i].registered)
- continue;
- if (type == CRYPTO_ALG_TYPE_SKCIPHER)
- crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
- else if (type == CRYPTO_ALG_TYPE_AHASH)
- crypto_unregister_ahash(&sa_algs[i].alg.ahash);
- else if (type == CRYPTO_ALG_TYPE_AEAD)
- crypto_unregister_aead(&sa_algs[i].alg.aead);
- sa_algs[i].registered = false;
- }
- }
- static int sa_init_mem(struct sa_crypto_data *dev_data)
- {
- struct device *dev = &dev_data->pdev->dev;
- /* Setup dma pool for security context buffers */
- dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
- SA_CTX_MAX_SZ, 64, 0);
- if (!dev_data->sc_pool) {
- dev_err(dev, "Failed to create dma pool");
- return -ENOMEM;
- }
- return 0;
- }
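- /* Request the rx1/rx2/tx DMA channels and apply a common slave configuration */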
- static int sa_dma_init(struct sa_crypto_data *dd)
- {
- int ret;
- struct dma_slave_config cfg;
- dd->dma_rx1 = NULL;
- dd->dma_tx = NULL;
- dd->dma_rx2 = NULL;
- ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
- if (ret)
- return ret;
- dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
- if (IS_ERR(dd->dma_rx1))
- return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
- "Unable to request rx1 DMA channel\n");
- dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
- if (IS_ERR(dd->dma_rx2)) {
- ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
- "Unable to request rx2 DMA channel\n");
- goto err_dma_rx2;
- }
- dd->dma_tx = dma_request_chan(dd->dev, "tx");
- if (IS_ERR(dd->dma_tx)) {
- ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
- "Unable to request tx DMA channel\n");
- goto err_dma_tx;
- }
- memzero_explicit(&cfg, sizeof(cfg));
- cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
- cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
- cfg.src_maxburst = 4;
- cfg.dst_maxburst = 4;
- ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
- if (ret) {
- dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
- ret);
- goto err_dma_config;
- }
- ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
- if (ret) {
- dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
- ret);
- goto err_dma_config;
- }
- ret = dmaengine_slave_config(dd->dma_tx, &cfg);
- if (ret) {
- dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
- ret);
- goto err_dma_config;
- }
- return 0;
- err_dma_config:
- dma_release_channel(dd->dma_tx);
- err_dma_tx:
- dma_release_channel(dd->dma_rx2);
- err_dma_rx2:
- dma_release_channel(dd->dma_rx1);
- return ret;
- }
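- /* device_for_each_child() callback: link each populated child device as a consumer of the SA2UL parent */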
- static int sa_link_child(struct device *dev, void *data)
- {
- struct device *parent = data;
- device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);
- return 0;
- }
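- /* Per-SoC configuration: hardware privilege values and the set of algorithms the device supports */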
- static struct sa_match_data am654_match_data = {
- .priv = 1,
- .priv_id = 1,
- .supported_algos = BIT(SA_ALG_CBC_AES) |
- BIT(SA_ALG_EBC_AES) |
- BIT(SA_ALG_CBC_DES3) |
- BIT(SA_ALG_ECB_DES3) |
- BIT(SA_ALG_SHA1) |
- BIT(SA_ALG_SHA256) |
- BIT(SA_ALG_SHA512) |
- BIT(SA_ALG_AUTHENC_SHA1_AES) |
- BIT(SA_ALG_AUTHENC_SHA256_AES),
- };
- static struct sa_match_data am64_match_data = {
- .priv = 0,
- .priv_id = 0,
- .supported_algos = BIT(SA_ALG_CBC_AES) |
- BIT(SA_ALG_EBC_AES) |
- BIT(SA_ALG_SHA256) |
- BIT(SA_ALG_SHA512) |
- BIT(SA_ALG_AUTHENC_SHA256_AES),
- };
- static const struct of_device_id of_match[] = {
- { .compatible = "ti,j721e-sa2ul", .data = &am654_match_data, },
- { .compatible = "ti,am654-sa2ul", .data = &am654_match_data, },
- { .compatible = "ti,am64-sa2ul", .data = &am64_match_data, },
- { .compatible = "ti,am62-sa3ul", .data = &am64_match_data, },
- {},
- };
- MODULE_DEVICE_TABLE(of, of_match);
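- /*
- * Probe: map the register space, set up the security-context DMA pool and
- * DMA channels, enable the engine blocks and register the supported
- * algorithms.
- */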
- static int sa_ul_probe(struct platform_device *pdev)
- {
- struct device *dev = &pdev->dev;
- struct device_node *node = dev->of_node;
- static void __iomem *saul_base;
- struct sa_crypto_data *dev_data;
- u32 status, val;
- int ret;
- dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
- if (!dev_data)
- return -ENOMEM;
- dev_data->match_data = of_device_get_match_data(dev);
- if (!dev_data->match_data)
- return -ENODEV;
- saul_base = devm_platform_ioremap_resource(pdev, 0);
- if (IS_ERR(saul_base))
- return PTR_ERR(saul_base);
- sa_k3_dev = dev;
- dev_data->dev = dev;
- dev_data->pdev = pdev;
- dev_data->base = saul_base;
- platform_set_drvdata(pdev, dev_data);
- dev_set_drvdata(sa_k3_dev, dev_data);
- pm_runtime_enable(dev);
- ret = pm_runtime_resume_and_get(dev);
- if (ret < 0) {
- dev_err(dev, "%s: failed to get sync: %d\n", __func__, ret);
- pm_runtime_disable(dev);
- return ret;
- }
- sa_init_mem(dev_data);
- ret = sa_dma_init(dev_data);
- if (ret)
- goto destroy_dma_pool;
- spin_lock_init(&dev_data->scid_lock);
- val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
- SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
- SA_EEC_TRNG_EN;
- status = readl_relaxed(saul_base + SA_ENGINE_STATUS);
- /* Only enable engines if all are not already enabled */
- if (val & ~status)
- writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);
- sa_register_algos(dev_data);
- ret = of_platform_populate(node, NULL, NULL, dev);
- if (ret)
- goto release_dma;
- device_for_each_child(dev, dev, sa_link_child);
- return 0;
- release_dma:
- sa_unregister_algos(dev);
- dma_release_channel(dev_data->dma_rx2);
- dma_release_channel(dev_data->dma_rx1);
- dma_release_channel(dev_data->dma_tx);
- destroy_dma_pool:
- dma_pool_destroy(dev_data->sc_pool);
- pm_runtime_put_sync(dev);
- pm_runtime_disable(dev);
- return ret;
- }
- static int sa_ul_remove(struct platform_device *pdev)
- {
- struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);
- of_platform_depopulate(&pdev->dev);
- sa_unregister_algos(&pdev->dev);
- dma_release_channel(dev_data->dma_rx2);
- dma_release_channel(dev_data->dma_rx1);
- dma_release_channel(dev_data->dma_tx);
- dma_pool_destroy(dev_data->sc_pool);
- platform_set_drvdata(pdev, NULL);
- pm_runtime_put_sync(&pdev->dev);
- pm_runtime_disable(&pdev->dev);
- return 0;
- }
- static struct platform_driver sa_ul_driver = {
- .probe = sa_ul_probe,
- .remove = sa_ul_remove,
- .driver = {
- .name = "saul-crypto",
- .of_match_table = of_match,
- },
- };
- module_platform_driver(sa_ul_driver);
- MODULE_LICENSE("GPL v2");