test_bpf.c 356 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366
  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Testsuite for BPF interpreter and BPF JIT compiler
  4. *
  5. * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
  6. */
  7. #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
  8. #include <linux/init.h>
  9. #include <linux/module.h>
  10. #include <linux/filter.h>
  11. #include <linux/bpf.h>
  12. #include <linux/skbuff.h>
  13. #include <linux/netdevice.h>
  14. #include <linux/if_vlan.h>
  15. #include <linux/random.h>
  16. #include <linux/highmem.h>
  17. #include <linux/sched.h>
  18. /* General test specific settings */
  19. #define MAX_SUBTESTS 3
  20. #define MAX_TESTRUNS 1000
  21. #define MAX_DATA 128
  22. #define MAX_INSNS 512
  23. #define MAX_K 0xffffFFFF
  24. /* Few constants used to init test 'skb' */
  25. #define SKB_TYPE 3
  26. #define SKB_MARK 0x1234aaaa
  27. #define SKB_HASH 0x1234aaab
  28. #define SKB_QUEUE_MAP 123
  29. #define SKB_VLAN_TCI 0xffff
  30. #define SKB_VLAN_PRESENT 1
  31. #define SKB_DEV_IFINDEX 577
  32. #define SKB_DEV_TYPE 588
  33. /* Redefine REGs to make tests less verbose */
  34. #define R0 BPF_REG_0
  35. #define R1 BPF_REG_1
  36. #define R2 BPF_REG_2
  37. #define R3 BPF_REG_3
  38. #define R4 BPF_REG_4
  39. #define R5 BPF_REG_5
  40. #define R6 BPF_REG_6
  41. #define R7 BPF_REG_7
  42. #define R8 BPF_REG_8
  43. #define R9 BPF_REG_9
  44. #define R10 BPF_REG_10
  45. /* Flags that can be passed to test cases */
  46. #define FLAG_NO_DATA BIT(0)
  47. #define FLAG_EXPECTED_FAIL BIT(1)
  48. #define FLAG_SKB_FRAG BIT(2)
  49. #define FLAG_VERIFIER_ZEXT BIT(3)
  50. #define FLAG_LARGE_MEM BIT(4)
  51. enum {
  52. CLASSIC = BIT(6), /* Old BPF instructions only. */
  53. INTERNAL = BIT(7), /* Extended instruction set. */
  54. };
  55. #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
/* Descriptor for a single interpreter/JIT test case. */
struct bpf_test {
	const char *descr;	/* human-readable test name */
	union {
		struct sock_filter insns[MAX_INSNS];	/* classic BPF program */
		struct bpf_insn insns_int[MAX_INSNS];	/* extended (eBPF) program */
		struct {
			void *insns;		/* separately allocated program ... */
			unsigned int len;	/* ... and its instruction count */
		} ptr;
	} u;
	__u8 aux;		/* FLAG_* bits plus CLASSIC/INTERNAL type bits */
	__u8 data[MAX_DATA];	/* packet payload for the test skb */
	struct {
		int data_size;	/* payload length used for this subtest */
		__u32 result;	/* expected program return value */
	} test[MAX_SUBTESTS];
	int (*fill_helper)(struct bpf_test *self);	/* builds u.ptr programs at runtime */
	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
	__u8 frag_data[MAX_DATA];	/* presumably the paged-fragment payload when FLAG_SKB_FRAG is set — verify against the skb setup code */
	int stack_depth; /* for eBPF only, since tests don't call verifier */
	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
};
  78. /* Large test cases need separate allocation and fill handler. */
  79. static int bpf_fill_maxinsns1(struct bpf_test *self)
  80. {
  81. unsigned int len = BPF_MAXINSNS;
  82. struct sock_filter *insn;
  83. __u32 k = ~0;
  84. int i;
  85. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  86. if (!insn)
  87. return -ENOMEM;
  88. for (i = 0; i < len; i++, k--)
  89. insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
  90. self->u.ptr.insns = insn;
  91. self->u.ptr.len = len;
  92. return 0;
  93. }
  94. static int bpf_fill_maxinsns2(struct bpf_test *self)
  95. {
  96. unsigned int len = BPF_MAXINSNS;
  97. struct sock_filter *insn;
  98. int i;
  99. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  100. if (!insn)
  101. return -ENOMEM;
  102. for (i = 0; i < len; i++)
  103. insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
  104. self->u.ptr.insns = insn;
  105. self->u.ptr.len = len;
  106. return 0;
  107. }
/* Fill a maximum-size classic BPF program with pseudo-random BPF_ADD
 * immediates, terminated by a return of the accumulator.
 */
static int bpf_fill_maxinsns3(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	struct rnd_state rnd;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Fixed seed: the generated immediates (and thus the accumulated
	 * sum) are deterministic across runs.
	 */
	prandom_seed_state(&rnd, 3141592653589793238ULL);

	for (i = 0; i < len - 1; i++) {
		__u32 k = prandom_u32_state(&rnd);

		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
  127. static int bpf_fill_maxinsns4(struct bpf_test *self)
  128. {
  129. unsigned int len = BPF_MAXINSNS + 1;
  130. struct sock_filter *insn;
  131. int i;
  132. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  133. if (!insn)
  134. return -ENOMEM;
  135. for (i = 0; i < len; i++)
  136. insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
  137. self->u.ptr.insns = insn;
  138. self->u.ptr.len = len;
  139. return 0;
  140. }
/* First insn jumps over a sled of 0xfefefefe returns directly to the
 * final instruction, which returns 0xabababab.
 */
static int bpf_fill_maxinsns5(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Offset len - 2 skips every sled instruction below. */
	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* A sled of SKF_AD_VLAN_TAG_PRESENT ancillary loads, ending by
 * returning the accumulator set by the last load.
 */
static int bpf_fill_maxinsns6(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_VLAN_TAG_PRESENT);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Load SKF_AD_CPU many times, stash one copy in X via TAX, load once
 * more and return A - X — zero when both loads observe the same value.
 */
static int bpf_fill_maxinsns7(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 4; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_CPU);

	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				   SKF_AD_CPU);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* A = 0xffffffff followed by a chain of JGT comparisons against the same
 * constant: A > K never holds, so every jump falls through; the
 * decreasing true-offsets all target the final return of A.
 */
static int bpf_fill_maxinsns8(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i, jmp_off = len - 3;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Jump chain: insn[0] jumps to the trailing JA, which jumps back to
 * insn[1]; that sets R0 = 0xcbababab and exits. The MOV sled in between
 * is never executed.
 */
static int bpf_fill_maxinsns9(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);	/* -> insn[len - 1] */
	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
	insn[2] = BPF_EXIT_INSN();

	for (i = 3; i < len - 2; i++)
		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);

	insn[len - 2] = BPF_EXIT_INSN();
	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));	/* -> insn[1] */

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Jump ladder: execution zig-zags between the two program halves
 * (0 -> hlen-1 -> 1 -> hlen-2 -> ...), converging on the middle
 * instruction, which finally jumps to the tail that sets
 * R0 = 0xabababac and exits.
 */
static int bpf_fill_maxinsns10(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* First half: forward jumps into the second half. */
	for (i = 0; i < hlen / 2; i++)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
	/* Second half: backward jumps into the first half. */
	for (i = hlen - 1; i > hlen / 2; i--)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);

	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);	/* -> tail */
	insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
	insn[hlen + 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Build back-to-back "pages" of plen unconditional jumps, each page
 * chaining forward to the next, followed by a remainder run and a final
 * return of 0xababcbac.
 *
 * NOTE(review): rlen is unsigned and computed as (len % plen) - 1; if
 * len were an exact multiple of plen this would underflow and the
 * second loop would write far out of bounds. Current callers
 * (len=BPF_MAXINSNS/plen=68 and len=12/plen=9) avoid that case —
 * confirm before adding new callers.
 */
static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
			 unsigned int plen)
{
	struct sock_filter *insn;
	unsigned int rlen;
	int i, j;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	rlen = (len % plen) - 1;

	/* Full pages: each insn jumps to the first insn of the next page. */
	for (i = 0; i + plen < len; i += plen)
		for (j = 0; j < plen; j++)
			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
						 plen - 1 - j, 0, 0);
	/* Remainder: jump chain leading into the final return. */
	for (j = 0; j < rlen; j++)
		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
					 0, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Maximum-size jump-page program with an odd page length. */
static int bpf_fill_maxinsns11(struct bpf_test *self)
{
	/* Hits 70 passes on x86_64 and triggers NOPs padding. */
	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
}
  274. static int bpf_fill_maxinsns12(struct bpf_test *self)
  275. {
  276. unsigned int len = BPF_MAXINSNS;
  277. struct sock_filter *insn;
  278. int i = 0;
  279. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  280. if (!insn)
  281. return -ENOMEM;
  282. insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
  283. for (i = 1; i < len - 1; i++)
  284. insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
  285. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
  286. self->u.ptr.insns = insn;
  287. self->u.ptr.len = len;
  288. return 0;
  289. }
/* A sled of BPF_LDX|BPF_MSH loads into X, then A = 0xabababab,
 * A ^= X, return A.
 */
static int bpf_fill_maxinsns13(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 3; i++)
		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);

	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Small jump-page program for JIT pass-count behavior. */
static int bpf_fill_ja(struct bpf_test *self)
{
	/* Hits exactly 11 passes on x86_64 JIT. */
	return __bpf_fill_ja(self, 12, 9);
}
/* Alternate packet-byte loads with SKF_AD_CPU ancillary loads, ending
 * with a constant return of 0xbee.
 */
static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i += 2) {
		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
					 SKF_AD_OFF + SKF_AD_CPU);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
/* Build a program of len - 4 atomic-add (XADD) instructions of R0 == 1
 * onto a stack slot initialized to 42, then load and return the slot.
 */
static int __bpf_fill_stxdw(struct bpf_test *self, int size)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
	insn[1] = BPF_ST_MEM(size, R10, -40, 42);

	for (i = 2; i < len - 2; i++)
		insn[i] = BPF_STX_XADD(size, R10, R0, -40);

	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
	insn[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	/* Tests bypass the verifier, so declare stack usage explicitly. */
	self->stack_depth = 40;

	return 0;
}
/* XADD sled on 32-bit words */
static int bpf_fill_stxw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_W);
}

/* XADD sled on 64-bit double words */
static int bpf_fill_stxdw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_DW);
}
  357. static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
  358. {
  359. struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
  360. memcpy(insns, tmp, sizeof(tmp));
  361. return 2;
  362. }
  363. /*
  364. * Branch conversion tests. Complex operations can expand to a lot
  365. * of instructions when JITed. This in turn may cause jump offsets
  366. * to overflow the field size of the native instruction, triggering
  367. * a branch conversion mechanism in some JITs.
  368. */
/* Emit a program with a conditional jump of the maximum forward offset
 * (S16_MAX) over a filler body of ALU operations; returns 1 if the
 * branch is taken (it lands exactly on the final exit), 2 otherwise.
 * The filler is dead code — it only provides the distance that forces
 * branch conversion when JITed.
 */
static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
{
	struct bpf_insn *insns;
	int len = S16_MAX + 5;
	int i;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	/* Taken branch lands on the trailing exit, leaving R0 == 1. */
	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
	insns[i++] = BPF_EXIT_INSN();

	/* Filler: alternating 32/64-bit ALU ops chosen because they can
	 * expand to many native instructions under a JIT.
	 */
	while (i < len - 1) {
		static const int ops[] = {
			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
		};
		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];

		if (i & 1)
			insns[i++] = BPF_ALU32_REG(op, R0, R1);
		else
			insns[i++] = BPF_ALU64_REG(op, R0, R1);
	}
	insns[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
/* Branch taken by runtime decision */
static int bpf_fill_max_jmp_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 1);	/* R0 == 1 holds */
}

/* Branch not taken by runtime decision */
static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JEQ, 0);	/* R0 == 0 fails */
}

/* Branch always taken, known at JIT time */
static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JGE, 0);	/* unsigned >= 0 always */
}

/* Branch never taken, known at JIT time */
static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
{
	return __bpf_fill_max_jmp(self, BPF_JLT, 0);	/* unsigned < 0 never */
}
/* ALU result computation used in tests.
 *
 * Computes the reference result of an ALU op on 64-bit operands into
 * *res. Returns false when the result is undefined (division or modulo
 * by zero), in which case the caller should emit no check for that
 * operand pair. Ops not listed leave *res == 0 and return true.
 */
static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
{
	*res = 0;
	switch (op) {
	case BPF_MOV:
		*res = v2;
		break;
	case BPF_AND:
		*res = v1 & v2;
		break;
	case BPF_OR:
		*res = v1 | v2;
		break;
	case BPF_XOR:
		*res = v1 ^ v2;
		break;
	case BPF_LSH:
		*res = v1 << v2;
		break;
	case BPF_RSH:
		*res = v1 >> v2;
		break;
	case BPF_ARSH:
		/* Arithmetic shift on an unsigned type: replicate the
		 * sign bit by hand when v1 is negative as s64
		 * (v1 > S64_MAX <=> MSB set).
		 */
		*res = v1 >> v2;
		if (v2 > 0 && v1 > S64_MAX)
			*res |= ~0ULL << (64 - v2);
		break;
	case BPF_ADD:
		*res = v1 + v2;
		break;
	case BPF_SUB:
		*res = v1 - v2;
		break;
	case BPF_MUL:
		*res = v1 * v2;
		break;
	case BPF_DIV:
		if (v2 == 0)
			return false;
		*res = div64_u64(v1, v2);
		break;
	case BPF_MOD:
		if (v2 == 0)
			return false;
		div64_u64_rem(v1, v2, res);
		break;
	}
	return true;
}
/* Test an ALU shift operation for all valid shift values */
static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
				u8 mode, bool alu32)
{
	static const s64 regs[] = {
		0x0123456789abcdefLL, /* dword > 0, word < 0 */
		0xfedcba9876543210LL, /* dword < 0, word > 0 */
		0xfedcba0198765432LL, /* dword < 0, word < 0 */
		0x0123458967abcdefLL, /* dword > 0, word > 0 */
	};
	int bits = alu32 ? 32 : 64;
	/* Per register: 2 slots (ld_imm64) plus "bits" iterations of 7
	 * insns each; plus 1 head and 2 tail instructions.
	 */
	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
	struct bpf_insn *insn;
	int imm, k;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (k = 0; k < ARRAY_SIZE(regs); k++) {
		s64 reg = regs[k];

		i += __bpf_ld_imm64(&insn[i], R3, reg);

		for (imm = 0; imm < bits; imm++) {
			u64 val;

			/* Perform operation */
			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
			if (alu32) {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU32_REG(op, R1, R2);

				/* Only ARSH cares about the sign of the
				 * truncated 32-bit operand.
				 */
				if (op == BPF_ARSH)
					reg = (s32)reg;
				else
					reg = (u32)reg;
				__bpf_alu_result(&val, reg, imm, op);
				val = (u32)val;
			} else {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU64_REG(op, R1, R2);
				__bpf_alu_result(&val, reg, imm, op);
			}

			/*
			 * When debugging a JIT that fails this test, one
			 * can write the immediate value to R0 here to find
			 * out which operand values that fail.
			 */

			/* Load reference and check the result */
			i += __bpf_ld_imm64(&insn[i], R4, val);
			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
			insn[i++] = BPF_EXIT_INSN();
		}
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
/* Thin wrappers binding one shift opcode/mode/width combination each. */
static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
}

static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
}

static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
}

static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
}

static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
}

static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
}

static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
}

static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
}

static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
}

static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
}

static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
}

static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
}
/*
 * Test an ALU register shift operation for all valid shift values
 * for the case when the source and destination are the same.
 */
static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
					 bool alu32)
{
	int bits = alu32 ? 32 : 64;
	/* Per shift value: 6 insns; plus 1 head and 2 tail. */
	int len = 3 + 6 * bits;
	struct bpf_insn *insn;
	int i = 0;
	u64 val;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (val = 0; val < bits; val++) {
		u64 res;

		/* Perform operation */
		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
		if (alu32)
			insn[i++] = BPF_ALU32_REG(op, R1, R1);
		else
			insn[i++] = BPF_ALU64_REG(op, R1, R1);

		/* Compute the reference result */
		__bpf_alu_result(&res, val, val, op);
		if (alu32)
			res = (u32)res;
		i += __bpf_ld_imm64(&insn[i], R2, res);

		/* Check the actual result */
		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
		insn[i++] = BPF_EXIT_INSN();
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
/* Thin wrappers for the same-register shift tests. */
static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
}

static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
}
/*
 * Common operand pattern generator for exhaustive power-of-two magnitudes
 * tests. The block size parameters can be adjusted to increase/reduce the
 * number of combinations tested and thereby execution speed and memory
 * footprint.
 */
  650. static inline s64 value(int msb, int delta, int sign)
  651. {
  652. return sign * (1LL << msb) + delta;
  653. }
/*
 * Build a test program by invoking @emit for every (dst, src) operand
 * pair produced by the two patterns below. @emit called with a NULL
 * buffer must return its worst-case instruction count, used for sizing.
 */
static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
			      int dbits, int sbits, int block1, int block2,
			      int (*emit)(struct bpf_test*, void*,
					  struct bpf_insn*, s64, s64))
{
	/* The four sign combinations for (dst, src) */
	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
	struct bpf_insn *insns;
	int di, si, bt, db, sb;
	int count, len, k;
	int extra = 1 + 2;	/* one head insn + two tail insns */
	int i = 0;

	/* Total number of iterations for the two pattern */
	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);

	/* Compute the maximum number of insns and allocate the buffer */
	len = extra + count * (*emit)(self, arg, NULL, 0, 0);

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Add head instruction(s) */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	/*
	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
	 * and with a block of contiguous values around each magnitude.
	 */
	for (di = 0; di < dbits - 1; di++)			/* Dst magnitudes */
		for (si = 0; si < sbits - 1; si++)		/* Src magnitudes */
			for (k = 0; k < ARRAY_SIZE(sgn); k++)	/* Sign combos */
				for (db = -(block1 / 2);
				     db < (block1 + 1) / 2; db++)
					for (sb = -(block1 / 2);
					     sb < (block1 + 1) / 2; sb++) {
						s64 dst, src;

						dst = value(di, db, sgn[k][0]);
						src = value(si, sb, sgn[k][1]);
						i += (*emit)(self, arg,
							     &insns[i],
							     dst, src);
					}
	/*
	 * Pattern 2: all combinations for a larger block of values
	 * for each power-of-two magnitude and sign, where the magnitude is
	 * the same for both operands.
	 */
	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)		/* Magnitude   */
		for (k = 0; k < ARRAY_SIZE(sgn); k++)		/* Sign combos */
			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
				for (sb = -(block2 / 2);
				     sb < (block2 + 1) / 2; sb++) {
					s64 dst, src;

					dst = value(bt % dbits, db, sgn[k][0]);
					src = value(bt % sbits, sb, sgn[k][1]);
					i += (*emit)(self, arg, &insns[i],
						     dst, src);
				}

	/* Append tail instructions */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insns[i++] = BPF_EXIT_INSN();
	/* @emit may skip undefined operand pairs, so i <= len. */
	BUG_ON(i > len);

	self->u.ptr.insns = insns;
	self->u.ptr.len = i;

	return 0;
}
/*
 * Block size parameters used in pattern tests below. Tune as needed to
 * increase/reduce the number of combinations tested, see following examples.
 * block values per operand MSB
 * ----------------------------------------
 * 0 none
 * 1 (1 << MSB)
 * 2 (1 << MSB) + [-1, 0]
 * 3 (1 << MSB) + [-1, 0, 1]
 */
  727. #define PATTERN_BLOCK1 1
  728. #define PATTERN_BLOCK2 5
  729. /* Number of test runs for a pattern test */
  730. #define NR_PATTERN_RUNS 1
  731. /*
  732. * Exhaustive tests of ALU operations for all combinations of power-of-two
  733. * magnitudes of the operands, both for positive and negative values. The
  734. * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
  735. * emit different code depending on the magnitude of the immediate value.
  736. */
/* Emit one ALU64-immediate check: R1 = dst, R1 op= imm, then compare R1
 * against the reference in R3 and exit early on mismatch. Emits nothing
 * when the result is undefined (div/mod by zero). A NULL insns pointer
 * returns the worst-case instruction count for buffer sizing.
 */
static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;	/* worst case: 2 + 2 + 3 insns below */

	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
/* Same as __bpf_emit_alu64_imm but for the 32-bit ALU class: operands
 * are truncated to u32 and the reference result is zero-extended.
 */
static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;	/* worst-case instruction count */

	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
/* Emit one ALU64 register-operand check: R1 = dst, R2 = src,
 * R1 op= R2, compare against the reference in R3. Skipped for
 * undefined results; NULL insns returns the worst-case count.
 */
static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;	/* worst case: 2 + 2 + 2 + 3 insns below */

	if (__bpf_alu_result(&res, dst, src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
/* Same as __bpf_emit_alu64_reg but for the 32-bit ALU class: operands
 * are truncated to u32 and the reference result is zero-extended.
 */
static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;	/* worst-case instruction count */

	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}
/* Pattern-test drivers: dbits/sbits select the operand magnitude ranges
 * (32-bit source range for the immediate variants, 64-bit for both
 * operands of the register variants).
 */
static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_imm);
}

static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_imm);
}

static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_reg);
}

static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_reg);
}
  831. /* ALU64 immediate operations */
  832. static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
  833. {
  834. return __bpf_fill_alu64_imm(self, BPF_MOV);
  835. }
  836. static int bpf_fill_alu64_and_imm(struct bpf_test *self)
  837. {
  838. return __bpf_fill_alu64_imm(self, BPF_AND);
  839. }
  840. static int bpf_fill_alu64_or_imm(struct bpf_test *self)
  841. {
  842. return __bpf_fill_alu64_imm(self, BPF_OR);
  843. }
  844. static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
  845. {
  846. return __bpf_fill_alu64_imm(self, BPF_XOR);
  847. }
  848. static int bpf_fill_alu64_add_imm(struct bpf_test *self)
  849. {
  850. return __bpf_fill_alu64_imm(self, BPF_ADD);
  851. }
  852. static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
  853. {
  854. return __bpf_fill_alu64_imm(self, BPF_SUB);
  855. }
  856. static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
  857. {
  858. return __bpf_fill_alu64_imm(self, BPF_MUL);
  859. }
  860. static int bpf_fill_alu64_div_imm(struct bpf_test *self)
  861. {
  862. return __bpf_fill_alu64_imm(self, BPF_DIV);
  863. }
  864. static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
  865. {
  866. return __bpf_fill_alu64_imm(self, BPF_MOD);
  867. }
/*
 * ALU32 immediate operations.
 * Each wrapper fixes one opcode argument for __bpf_fill_alu32_imm().
 */
static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOV);
}

static int bpf_fill_alu32_and_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_AND);
}

static int bpf_fill_alu32_or_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_OR);
}

static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_XOR);
}

static int bpf_fill_alu32_add_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MUL);
}

static int bpf_fill_alu32_div_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOD);
}
/*
 * ALU64 register operations.
 * Each wrapper fixes one opcode argument for __bpf_fill_alu64_reg().
 */
static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOV);
}

static int bpf_fill_alu64_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_AND);
}

static int bpf_fill_alu64_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_OR);
}

static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_XOR);
}

static int bpf_fill_alu64_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_ADD);
}

static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_SUB);
}

static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MUL);
}

static int bpf_fill_alu64_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_DIV);
}

static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOD);
}
/*
 * ALU32 register operations.
 * Each wrapper fixes one opcode argument for __bpf_fill_alu32_reg().
 */
static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOV);
}

static int bpf_fill_alu32_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_AND);
}

static int bpf_fill_alu32_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_OR);
}

static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_XOR);
}

static int bpf_fill_alu32_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MUL);
}

static int bpf_fill_alu32_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOD);
}
/*
 * Test JITs that implement complex ALU operations as function
 * calls, and must re-arrange operands for argument passing.
 *
 * For each destination register R0..R9: apply @op with a fixed
 * immediate, then verify the low and high 32 bits of the result.
 * On mismatch the program exits with __LINE__ as a failure marker;
 * on full success it returns 1.
 */
static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
{
	/* 10 registers x 10 instructions each, plus final MOV + EXIT */
	int len = 2 + 10 * 10;
	struct bpf_insn *insns;
	u64 dst, res;
	int i = 0;
	u32 imm;
	int rd;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Operand and result values according to operation */
	if (alu32)
		dst = 0x76543210U;
	else
		dst = 0x7edcba9876543210ULL;
	imm = 0x01234567U;

	/* Keep shift counts in range for the operand width */
	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
		imm &= 31;

	__bpf_alu_result(&res, dst, imm, op);
	if (alu32)
		res = (u32)res;

	/* Check all operand registers */
	for (rd = R0; rd <= R9; rd++) {
		i += __bpf_ld_imm64(&insns[i], rd, dst);

		if (alu32)
			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
		else
			insns[i++] = BPF_ALU64_IMM(op, rd, imm);

		/* Check low word, then shift down and check high word */
		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
		insns[i++] = BPF_EXIT_INSN();

		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
		insns[i++] = BPF_EXIT_INSN();
	}

	insns[i++] = BPF_MOV64_IMM(R0, 1);
	insns[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	/* The emitted count must match the computed buffer length exactly */
	BUG_ON(i != len);

	return 0;
}
/*
 * ALU64 K registers.
 * Each wrapper fixes one opcode for __bpf_fill_alu_imm_regs(op, alu32=false).
 */
static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
}

static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
}

static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
}

static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
}

static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
}

static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
}

static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
}

static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
}

static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
}

static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
}
/*
 * ALU32 K registers.
 * Each wrapper fixes one opcode for __bpf_fill_alu_imm_regs(op, alu32=true).
 */
static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
}

static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
}

static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
}

static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
}

static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
}

static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
}

static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
}

static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
}

static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
}

static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
{
	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
}
/*
 * Test JITs that implement complex ALU operations as function
 * calls, and must re-arrange operands for argument passing.
 *
 * For every (rd, rs) pair in R0..R9 x R0..R9: load fixed operands,
 * apply @op, and verify both 32-bit halves of the result. When
 * rd == rs both registers alias the same value, so a separately
 * precomputed "same" result is expected instead.
 */
static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
{
	/* 10 x 10 register pairs x 12 instructions each, plus MOV + EXIT */
	int len = 2 + 10 * 10 * 12;
	u64 dst, src, res, same;
	struct bpf_insn *insns;
	int rd, rs;
	int i = 0;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Operand and result values according to operation */
	if (alu32) {
		dst = 0x76543210U;
		src = 0x01234567U;
	} else {
		dst = 0x7edcba9876543210ULL;
		src = 0x0123456789abcdefULL;
	}

	/* Keep shift counts in range for the operand width */
	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
		src &= 31;

	__bpf_alu_result(&res, dst, src, op);
	__bpf_alu_result(&same, src, src, op);

	if (alu32) {
		res = (u32)res;
		same = (u32)same;
	}

	/* Check all combinations of operand registers */
	for (rd = R0; rd <= R9; rd++) {
		for (rs = R0; rs <= R9; rs++) {
			/* Aliased registers both hold src when rd == rs */
			u64 val = rd == rs ? same : res;

			i += __bpf_ld_imm64(&insns[i], rd, dst);
			i += __bpf_ld_imm64(&insns[i], rs, src);

			if (alu32)
				insns[i++] = BPF_ALU32_REG(op, rd, rs);
			else
				insns[i++] = BPF_ALU64_REG(op, rd, rs);

			/* Check low word, then shift down and check high word */
			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
			insns[i++] = BPF_EXIT_INSN();

			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
			insns[i++] = BPF_EXIT_INSN();
		}
	}

	insns[i++] = BPF_MOV64_IMM(R0, 1);
	insns[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;
	/* The emitted count must match the computed buffer length exactly */
	BUG_ON(i != len);

	return 0;
}
/*
 * ALU64 X register combinations.
 * Each wrapper fixes one opcode for __bpf_fill_alu_reg_pairs(op, alu32=false).
 */
static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
}

static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
}

static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
}

static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
}

static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
}

static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
}

static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
}

static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
}

static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
}

static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
}

static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
}

static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
}
/*
 * ALU32 X register combinations.
 * Each wrapper fixes one opcode for __bpf_fill_alu_reg_pairs(op, alu32=true).
 */
static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
}

static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
}

static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
}

static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
}

static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
}

static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
}

static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
}

static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
}

static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
}

static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
}

static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
}

static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
}
/*
 * Exhaustive tests of atomic operations for all power-of-two operand
 * magnitudes, both for positive and negative values.
 */

/*
 * Emit one 64-bit atomic test case: store @dst at R10-8, apply @op with
 * @src, then verify the new memory value (R3 = expected result), the
 * source register (R4 = old memory value for BPF_FETCH ops, else
 * unchanged @src), and that R0 still holds its canary value.
 * Returns the number of instructions emitted; when @insns is NULL,
 * returns the fixed count (21) so the caller can size the buffer.
 */
static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
			       struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	u64 keep, fetch, res;
	int i = 0;

	if (!insns)
		return 21;

	/* Expected memory value after the operation */
	switch (op) {
	case BPF_XCHG:
		res = src;
		break;
	default:
		__bpf_alu_result(&res, dst, src, BPF_OP(op));
	}

	/* R0 canary: must survive any non-CMPXCHG atomic operation */
	keep = 0x0123456789abcdefULL;

	/* Expected source-register value after the operation */
	if (op & BPF_FETCH)
		fetch = dst;
	else
		fetch = src;

	i += __bpf_ld_imm64(&insns[i], R0, keep);
	i += __bpf_ld_imm64(&insns[i], R1, dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);
	i += __bpf_ld_imm64(&insns[i], R3, res);
	i += __bpf_ld_imm64(&insns[i], R4, fetch);
	i += __bpf_ld_imm64(&insns[i], R5, keep);

	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);

	/* Any failed comparison falls through to EXIT with R0 != 1 */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
	insns[i++] = BPF_EXIT_INSN();

	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
	insns[i++] = BPF_EXIT_INSN();

	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
/*
 * 32-bit variant of __bpf_emit_atomic64(): operands and expected values
 * are truncated to u32 and the memory cell is a word at R10-4.
 * Returns the number of instructions emitted, or the fixed count (21)
 * when @insns is NULL.
 */
static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
			       struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	u64 keep, fetch, res;
	int i = 0;

	if (!insns)
		return 21;

	/* Expected memory value after the operation, computed in 32 bits */
	switch (op) {
	case BPF_XCHG:
		res = src;
		break;
	default:
		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
	}

	/* R0 canary: must survive any non-CMPXCHG atomic operation */
	keep = 0x0123456789abcdefULL;

	/* Expected source-register value after the operation */
	if (op & BPF_FETCH)
		fetch = (u32)dst;
	else
		fetch = src;

	i += __bpf_ld_imm64(&insns[i], R0, keep);
	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);
	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
	i += __bpf_ld_imm64(&insns[i], R4, fetch);
	i += __bpf_ld_imm64(&insns[i], R5, keep);

	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);

	/* Any failed comparison falls through to EXIT with R0 != 1 */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
	insns[i++] = BPF_EXIT_INSN();

	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
	insns[i++] = BPF_EXIT_INSN();

	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
/*
 * Emit a 64-bit BPF_CMPXCHG test sequence. CMPXCHG compares R0 against
 * the memory operand: R0 is first loaded with ~dst so the exchange must
 * fail (memory preserved, old value fetched into R0); since R0 then
 * holds the memory value, the second attempt must succeed (memory
 * becomes @src, R0 keeps the old value @dst).
 * Returns the number of instructions emitted, or the fixed count (23)
 * when @insns is NULL.
 */
static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int i = 0;

	if (!insns)
		return 23;

	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
	i += __bpf_ld_imm64(&insns[i], R1, dst);
	i += __bpf_ld_imm64(&insns[i], R2, src);

	/* Result unsuccessful */
	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);

	/* Memory must be unchanged ... */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* ... and R0 must hold the fetched (old) memory value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* Result successful */
	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);

	/* Memory must now hold the new value ... */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	/* ... and R0 the previous memory value */
	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
	insns[i++] = BPF_EXIT_INSN();

	return i;
}
  1387. static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
  1388. struct bpf_insn *insns, s64 dst, s64 src)
  1389. {
  1390. int i = 0;
  1391. if (!insns)
  1392. return 27;
  1393. i += __bpf_ld_imm64(&insns[i], R0, ~dst);
  1394. i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
  1395. i += __bpf_ld_imm64(&insns[i], R2, src);
  1396. /* Result unsuccessful */
  1397. insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
  1398. insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
  1399. insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
  1400. insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
  1401. insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
  1402. insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
  1403. insns[i++] = BPF_EXIT_INSN();
  1404. insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
  1405. insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
  1406. insns[i++] = BPF_EXIT_INSN();
  1407. /* Result successful */
  1408. i += __bpf_ld_imm64(&insns[i], R0, dst);
  1409. insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
  1410. insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
  1411. insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
  1412. insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
  1413. insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
  1414. insns[i++] = BPF_EXIT_INSN();
  1415. insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
  1416. insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
  1417. insns[i++] = BPF_EXIT_INSN();
  1418. return i;
  1419. }
/*
 * Drive the exhaustive operand-pattern generator for one atomic opcode,
 * 64-bit and 32-bit variants. Unlike the ALU fillers these pass 0 in
 * place of PATTERN_BLOCK1.
 */
static int __bpf_fill_atomic64(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  0, PATTERN_BLOCK2,
				  &__bpf_emit_atomic64);
}

static int __bpf_fill_atomic32(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  0, PATTERN_BLOCK2,
				  &__bpf_emit_atomic32);
}
/*
 * 64-bit atomic operations.
 * Each wrapper fixes one atomic opcode for __bpf_fill_atomic64();
 * CMPXCHG uses its own emitter via __bpf_fill_pattern() directly.
 */
static int bpf_fill_atomic64_add(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_ADD);
}

static int bpf_fill_atomic64_and(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_AND);
}

static int bpf_fill_atomic64_or(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_OR);
}

static int bpf_fill_atomic64_xor(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XOR);
}

static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic64_xchg(struct bpf_test *self)
{
	return __bpf_fill_atomic64(self, BPF_XCHG);
}

static int bpf_fill_cmpxchg64(struct bpf_test *self)
{
	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
				  &__bpf_emit_cmpxchg64);
}
/*
 * 32-bit atomic operations.
 * Each wrapper fixes one atomic opcode for __bpf_fill_atomic32();
 * CMPXCHG uses its own emitter via __bpf_fill_pattern() directly.
 */
static int bpf_fill_atomic32_add(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_ADD);
}

static int bpf_fill_atomic32_and(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_AND);
}

static int bpf_fill_atomic32_or(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_OR);
}

static int bpf_fill_atomic32_xor(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XOR);
}

static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic32_xchg(struct bpf_test *self)
{
	return __bpf_fill_atomic32(self, BPF_XCHG);
}

static int bpf_fill_cmpxchg32(struct bpf_test *self)
{
	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
				  &__bpf_emit_cmpxchg32);
}
/*
 * Test JITs that implement ATOMIC operations as function calls or
 * other primitives, and must re-arrange operands for argument passing.
 */

/*
 * For every (rd, rs) pair in R0..R9 x R0..R9, perform the atomic @op of
 * @width (BPF_DW or BPF_W) with rd holding the memory pointer (copied
 * from R10) and rs the update value, then verify R0, the source
 * register, the destination (pointer) register and the memory word.
 * Register aliasing cases (rd == rs, or either aliasing R0, which
 * CMPXCHG uses implicitly) are excluded from the checks that they would
 * invalidate. __LINE__ is used as the failure marker; the emitted
 * length varies per case, so only an upper bound is BUG_ON-checked.
 */
static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
{
	struct bpf_insn *insn;
	/* Worst case: 34 instructions per register pair, plus MOV + EXIT */
	int len = 2 + 34 * 10 * 10;
	u64 mem, upd, res;
	int rd, rs, i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Operand and memory values */
	if (width == BPF_DW) {
		mem = 0x0123456789abcdefULL;
		upd = 0xfedcba9876543210ULL;
	} else { /* BPF_W */
		mem = 0x01234567U;
		upd = 0x76543210U;
	}

	/* Memory updated according to operation */
	switch (op) {
	case BPF_XCHG:
		res = upd;
		break;
	case BPF_CMPXCHG:
		/* R0 is loaded with ~mem below, so the compare fails */
		res = mem;
		break;
	default:
		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
	}

	/* Test all operand registers */
	for (rd = R0; rd <= R9; rd++) {
		for (rs = R0; rs <= R9; rs++) {
			u64 cmp, src;

			/* Initialize value in memory */
			i += __bpf_ld_imm64(&insn[i], R0, mem);
			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);

			/* Initialize registers in order */
			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
			i += __bpf_ld_imm64(&insn[i], rs, upd);
			insn[i++] = BPF_MOV64_REG(rd, R10);

			/* Perform atomic operation */
			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
			if (op == BPF_CMPXCHG && width == BPF_W)
				insn[i++] = BPF_ZEXT_REG(R0);

			/* Check R0 register value */
			if (op == BPF_CMPXCHG)
				cmp = mem;  /* Expect value from memory */
			else if (R0 == rd || R0 == rs)
				cmp = 0;    /* Aliased, checked below */
			else
				cmp = ~mem; /* Expect value to be preserved */
			if (cmp) {
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
							  (u32)cmp, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
							  cmp >> 32, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check source register value */
			if (rs == R0 && op == BPF_CMPXCHG)
				src = 0; /* Aliased with R0, checked above */
			else if (rs == rd && (op == BPF_CMPXCHG ||
					      !(op & BPF_FETCH)))
				src = 0; /* Aliased with rd, checked below */
			else if (op == BPF_CMPXCHG)
				src = upd; /* Expect value to be preserved */
			else if (op & BPF_FETCH)
				src = mem; /* Expect fetched value from mem */
			else /* no fetch */
				src = upd; /* Expect value to be preserved */
			if (src) {
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
							  (u32)src, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
							  src >> 32, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check destination register value */
			if (!(rd == R0 && op == BPF_CMPXCHG) &&
			    !(rd == rs && (op & BPF_FETCH))) {
				/* rd held the R10 copy, must be untouched */
				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
				insn[i++] = BPF_EXIT_INSN();
			}

			/* Check value in memory */
			if (rs != rd) {                  /* No aliasing */
				i += __bpf_ld_imm64(&insn[i], R1, res);
			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
				/* rs == rd held the pointer, not upd */
				insn[i++] = BPF_MOV64_REG(R1, R10);
			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
				i += __bpf_ld_imm64(&insn[i], R1, mem);
			} else {                         /* Aliased, ALU oper */
				i += __bpf_ld_imm64(&insn[i], R1, mem);
				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
			}

			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
			if (width == BPF_DW)
				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
			else /* width == BPF_W */
				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
			insn[i++] = BPF_EXIT_INSN();
		}
	}

	insn[i++] = BPF_MOV64_IMM(R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	/* Actual length varies with the skipped alias checks */
	self->u.ptr.len = i;
	BUG_ON(i > len);

	return 0;
}
/*
 * 64-bit atomic register tests.
 * Each wrapper fixes one opcode for __bpf_fill_atomic_reg_pairs(BPF_DW, op).
 */
static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
}

static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
}

static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
}

static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
}

static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
}

static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
}
/*
 * 32-bit atomic register tests.
 * Each wrapper fixes one opcode for __bpf_fill_atomic_reg_pairs(BPF_W, op).
 */
static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
}

static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
}

static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
}

static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
}

static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
}

static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
}

static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
}

static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
}

static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
}

static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
{
	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
}
  1720. /*
  1721. * Test the two-instruction 64-bit immediate load operation for all
  1722. * power-of-two magnitudes of the immediate operand. For each MSB, a block
  1723. * of immediate values centered around the power-of-two MSB are tested,
  1724. * both for positive and negative values. The test is designed to verify
  1725. * the operation for JITs that emit different code depending on the magnitude
  1726. * of the immediate value. This is often the case if the native instruction
  1727. * immediate field width is narrower than 32 bits.
  1728. */
/*
 * Generate a test program loading 64-bit immediates around every
 * power-of-two magnitude: for each MSB position 0..62, a block of
 * values centered on +/-2^bit is loaded with LD_IMM64 and compared
 * against the same value rebuilt at runtime from two 32-bit halves.
 * On success the program returns 1; on the first mismatch it exits
 * early with R0 == 0. Returns 0, or -ENOMEM on allocation failure.
 */
static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
{
	int block = 64; /* Increase for more tests per MSB position */
	/* 8 insns per value: 63 MSBs * block values * 2 signs, + 3 fixed */
	int len = 3 + 8 * 63 * block * 2;
	struct bpf_insn *insn;
	int bit, adj, sign;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (bit = 0; bit <= 62; bit++) {
		for (adj = -block / 2; adj < block / 2; adj++) {
			for (sign = -1; sign <= 1; sign += 2) {
				s64 imm = sign * ((1LL << bit) + adj);

				/* Perform operation */
				i += __bpf_ld_imm64(&insn[i], R1, imm);

				/* Load reference: assemble the 64-bit value
				 * in R2 from its low and high 32-bit halves.
				 */
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
							  (u32)(imm >> 32));
				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);

				/* Check result: exit with R0 == 0 on mismatch */
				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
				insn[i++] = BPF_EXIT_INSN();
			}
		}
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}
  1765. /*
  1766. * Test the two-instruction 64-bit immediate load operation for different
  1767. * combinations of bytes. Each byte in the 64-bit word is constructed as
  1768. * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
 * All 256 patterns of (base1, mask1) versus (base2, mask2) byte selections
 * across the eight byte positions are tested.
  1770. */
  1771. static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
  1772. u8 base1, u8 mask1,
  1773. u8 base2, u8 mask2)
  1774. {
  1775. struct bpf_insn *insn;
  1776. int len = 3 + 8 * BIT(8);
  1777. int pattern, index;
  1778. u32 rand = 1;
  1779. int i = 0;
  1780. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  1781. if (!insn)
  1782. return -ENOMEM;
  1783. insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
  1784. for (pattern = 0; pattern < BIT(8); pattern++) {
  1785. u64 imm = 0;
  1786. for (index = 0; index < 8; index++) {
  1787. int byte;
  1788. if (pattern & BIT(index))
  1789. byte = (base1 & mask1) | (rand & ~mask1);
  1790. else
  1791. byte = (base2 & mask2) | (rand & ~mask2);
  1792. imm = (imm << 8) | byte;
  1793. }
  1794. /* Update our LCG */
  1795. rand = rand * 1664525 + 1013904223;
  1796. /* Perform operation */
  1797. i += __bpf_ld_imm64(&insn[i], R1, imm);
  1798. /* Load reference */
  1799. insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
  1800. insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
  1801. insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
  1802. insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
  1803. /* Check result */
  1804. insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
  1805. insn[i++] = BPF_EXIT_INSN();
  1806. }
  1807. insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
  1808. insn[i++] = BPF_EXIT_INSN();
  1809. self->u.ptr.insns = insn;
  1810. self->u.ptr.len = len;
  1811. BUG_ON(i != len);
  1812. return 0;
  1813. }
/*
 * Byte-pattern instantiations of __bpf_fill_ld_imm64_bytes():
 * checker boards of 0x00/0xff, and mixes of positive, negative and
 * zero bytes.
 */
static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
}

static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
}

static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
}

static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
}
  1830. /*
  1831. * Exhaustive tests of JMP operations for all combinations of power-of-two
  1832. * magnitudes of the operands, both for positive and negative values. The
  1833. * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
  1834. * emit different code depending on the magnitude of the immediate value.
  1835. */
  1836. static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
  1837. {
  1838. switch (op) {
  1839. case BPF_JSET:
  1840. return !!(v1 & v2);
  1841. case BPF_JEQ:
  1842. return v1 == v2;
  1843. case BPF_JNE:
  1844. return v1 != v2;
  1845. case BPF_JGT:
  1846. return (u64)v1 > (u64)v2;
  1847. case BPF_JGE:
  1848. return (u64)v1 >= (u64)v2;
  1849. case BPF_JLT:
  1850. return (u64)v1 < (u64)v2;
  1851. case BPF_JLE:
  1852. return (u64)v1 <= (u64)v2;
  1853. case BPF_JSGT:
  1854. return v1 > v2;
  1855. case BPF_JSGE:
  1856. return v1 >= v2;
  1857. case BPF_JSLT:
  1858. return v1 < v2;
  1859. case BPF_JSLE:
  1860. return v1 <= v2;
  1861. }
  1862. return false;
  1863. }
  1864. static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
  1865. struct bpf_insn *insns, s64 dst, s64 imm)
  1866. {
  1867. int op = *(int *)arg;
  1868. if (insns) {
  1869. bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
  1870. int i = 0;
  1871. insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
  1872. i += __bpf_ld_imm64(&insns[i], R1, dst);
  1873. insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
  1874. if (!match)
  1875. insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
  1876. insns[i++] = BPF_EXIT_INSN();
  1877. return i;
  1878. }
  1879. return 5 + 1;
  1880. }
  1881. static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
  1882. struct bpf_insn *insns, s64 dst, s64 imm)
  1883. {
  1884. int op = *(int *)arg;
  1885. if (insns) {
  1886. bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
  1887. int i = 0;
  1888. i += __bpf_ld_imm64(&insns[i], R1, dst);
  1889. insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
  1890. if (!match)
  1891. insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
  1892. insns[i++] = BPF_EXIT_INSN();
  1893. return i;
  1894. }
  1895. return 5;
  1896. }
  1897. static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
  1898. struct bpf_insn *insns, s64 dst, s64 src)
  1899. {
  1900. int op = *(int *)arg;
  1901. if (insns) {
  1902. bool match = __bpf_match_jmp_cond(dst, src, op);
  1903. int i = 0;
  1904. i += __bpf_ld_imm64(&insns[i], R1, dst);
  1905. i += __bpf_ld_imm64(&insns[i], R2, src);
  1906. insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
  1907. if (!match)
  1908. insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
  1909. insns[i++] = BPF_EXIT_INSN();
  1910. return i;
  1911. }
  1912. return 7;
  1913. }
  1914. static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
  1915. struct bpf_insn *insns, s64 dst, s64 src)
  1916. {
  1917. int op = *(int *)arg;
  1918. if (insns) {
  1919. bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
  1920. int i = 0;
  1921. i += __bpf_ld_imm64(&insns[i], R1, dst);
  1922. i += __bpf_ld_imm64(&insns[i], R2, src);
  1923. insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
  1924. if (!match)
  1925. insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
  1926. insns[i++] = BPF_EXIT_INSN();
  1927. return i;
  1928. }
  1929. return 7;
  1930. }
/*
 * Drive __bpf_fill_pattern() over all operand-magnitude combinations
 * for one jump condition: 64-bit dst with 32-bit imm or 64-bit src.
 */
static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_imm);
}

static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_imm);
}

static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_reg);
}

static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_reg);
}
/*
 * JMP immediate tests: one wrapper per 64-bit conditional jump
 * operation, each binding __bpf_fill_jmp_imm() to its condition code.
 */
static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLE);
}
/*
 * JMP32 immediate tests: one wrapper per 32-bit conditional jump
 * operation, each binding __bpf_fill_jmp32_imm() to its condition code.
 */
static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
}
/*
 * JMP register tests: one wrapper per 64-bit conditional jump
 * operation, each binding __bpf_fill_jmp_reg() to its condition code.
 */
static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLE);
}
/*
 * JMP32 register tests: one wrapper per 32-bit conditional jump
 * operation, each binding __bpf_fill_jmp32_reg() to its condition code.
 */
static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
}
  2135. /*
  2136. * Set up a sequence of staggered jumps, forwards and backwards with
  2137. * increasing offset. This tests the conversion of relative jumps to
  2138. * JITed native jumps. On some architectures, for example MIPS, a large
  2139. * PC-relative jump offset may overflow the immediate field of the native
  2140. * conditional branch instruction, triggering a conversion to use an
  2141. * absolute jump instead. Since this changes the jump offsets, another
  2142. * offset computation pass is necessary, and that may in turn trigger
  2143. * another branch conversion. This jump sequence is particularly nasty
  2144. * in that regard.
  2145. *
  2146. * The sequence generation is parameterized by size and jump type.
  2147. * The size must be even, and the expected result is always size + 1.
  2148. * Below is an example with size=8 and result=9.
  2149. *
  2150. * ________________________Start
  2151. * R0 = 0
  2152. * R1 = r1
  2153. * R2 = r2
  2154. * ,------- JMP +4 * 3______________Preamble: 4 insns
  2155. * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
  2156. * | | R0 = 8 |
  2157. * | | JMP +7 * 3 ------------------------.
  2158. * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
  2159. * | | | R0 = 6 | | |
  2160. * | | | JMP +5 * 3 ------------------. | |
  2161. * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
  2162. * | | | | R0 = 4 | | | | |
  2163. * | | | | JMP +3 * 3 ------------. | | | |
  2164. * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
  2165. * | | | | | R0 = 2 | | | | | | |
  2166. * | | | | | JMP +1 * 3 ------. | | | | | |
  2167. * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
  2168. * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
  2169. * | | | | | JMP -2 * 3 ---' | | | | | | |
  2170. * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
  2171. * | | | | | | R0 = 3 | | | | | |
  2172. * | | | | | | JMP -4 * 3 ---------' | | | | |
  2173. * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
  2174. * | | | | | | | R0 = 5 | | | |
  2175. * | | | | | | | JMP -6 * 3 ---------------' | | |
  2176. * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
  2177. * | | | | | | | | R0 = 7 | |
  2178. * | | Error | | | JMP -8 * 3 ---------------------' |
  2179. * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
  2180. * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
  2181. * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
  2182. *
  2183. */
/* The maximum size parameter: the largest even size whose guard-jump
 * offsets (up to 3 * size + 1) still fit in the signed 16-bit off field.
 */
#define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)

/* We use a reduced number of iterations to get a reasonable execution time */
#define NR_STAGGERED_JMP_RUNS 10
/*
 * Build the staggered jump sequence described above from the jump
 * template *jmp. R1 and R2 are preloaded with r1 and r2 so that the
 * template's branch condition holds. R0 records the visit order; a
 * wrongly resolved branch target makes one of the BPF_JNE guards jump
 * to the final EXIT with an unexpected value in R0. The expected
 * result, size + 1, is read back from self->test[0].result.
 * Returns 0, or -ENOMEM on allocation failure.
 */
static int __bpf_fill_staggered_jumps(struct bpf_test *self,
				      const struct bpf_insn *jmp,
				      u64 r1, u64 r2)
{
	int size = self->test[0].result - 1;
	/* 4-insn preamble + 3 insns per location; the last location's
	 * jump slot is overwritten by the final EXIT below.
	 */
	int len = 4 + 3 * (size + 1);
	struct bpf_insn *insns;
	int off, ind;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Preamble */
	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);

	/* Sequence: groups are visited with distances
	 * off = size, size - 2, ..., 2, -1, -3, ..., alternating
	 * forward and backward jumps of growing reach.
	 */
	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
		struct bpf_insn *ins = &insns[4 + 3 * ind];
		int loc;

		/* No zero distance: switch over to the backward jumps */
		if (off == 0)
			off--;

		loc = abs(off);
		/* Guard: divert to the final EXIT if arrived out of order */
		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
				     3 * (size - ind) + 1);
		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
		ins[2] = *jmp;
		ins[2].off = 3 * (off - 1);
	}

	/* Return */
	insns[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;

	return 0;
}
/* 64-bit unconditional jump: BPF_JA takes no operands */
static int bpf_fill_staggered_ja(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
}
/*
 * 64-bit immediate jumps: each wrapper instantiates the staggered
 * sequence with one conditional BPF_JMP_IMM insn whose R1 value is
 * chosen so that the condition holds and the branch is taken.
 */
static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
}

static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
}

static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
}

static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}

static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}
/*
 * 64-bit register jumps: each wrapper instantiates the staggered
 * sequence with one conditional BPF_JMP_REG insn whose R1/R2 values
 * are chosen so that the condition holds and the branch is taken.
 */
static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
}

static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
}

static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
}

static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
}

static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
}

static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
}

static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
}

static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
}
/*
 * 32-bit immediate jumps: each wrapper instantiates the staggered
 * sequence with one conditional BPF_JMP32_IMM insn whose R1 value is
 * chosen so that the condition holds and the branch is taken.
 */
static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
}

static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
}

static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
}

static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}

static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}
/*
 * 32-bit register jumps: each wrapper instantiates the staggered
 * sequence with one conditional BPF_JMP32_REG insn whose R1/R2 values
 * are chosen so that the condition holds and the branch is taken.
 */
static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
}

static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
}

static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
}

static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
}

static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
}

static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
}

static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
}

static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
}
  2453. static struct bpf_test tests[] = {
  2454. {
  2455. "TAX",
  2456. .u.insns = {
  2457. BPF_STMT(BPF_LD | BPF_IMM, 1),
  2458. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2459. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2460. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2461. BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
  2462. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2463. BPF_STMT(BPF_LD | BPF_LEN, 0),
  2464. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2465. BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
  2466. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
  2467. BPF_STMT(BPF_RET | BPF_A, 0)
  2468. },
  2469. CLASSIC,
  2470. { 10, 20, 30, 40, 50 },
  2471. { { 2, 10 }, { 3, 20 }, { 4, 30 } },
  2472. },
  2473. {
  2474. "TXA",
  2475. .u.insns = {
  2476. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2477. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  2478. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2479. BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
  2480. },
  2481. CLASSIC,
  2482. { 10, 20, 30, 40, 50 },
  2483. { { 1, 2 }, { 3, 6 }, { 4, 8 } },
  2484. },
  2485. {
  2486. "ADD_SUB_MUL_K",
  2487. .u.insns = {
  2488. BPF_STMT(BPF_LD | BPF_IMM, 1),
  2489. BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
  2490. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  2491. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  2492. BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
  2493. BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
  2494. BPF_STMT(BPF_RET | BPF_A, 0)
  2495. },
  2496. CLASSIC | FLAG_NO_DATA,
  2497. { },
  2498. { { 0, 0xfffffffd } }
  2499. },
  2500. {
  2501. "DIV_MOD_KX",
  2502. .u.insns = {
  2503. BPF_STMT(BPF_LD | BPF_IMM, 8),
  2504. BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
  2505. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2506. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  2507. BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
  2508. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2509. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  2510. BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
  2511. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2512. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  2513. BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
  2514. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2515. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  2516. BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
  2517. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2518. BPF_STMT(BPF_RET | BPF_A, 0)
  2519. },
  2520. CLASSIC | FLAG_NO_DATA,
  2521. { },
  2522. { { 0, 0x20000000 } }
  2523. },
  2524. {
  2525. "AND_OR_LSH_K",
  2526. .u.insns = {
  2527. BPF_STMT(BPF_LD | BPF_IMM, 0xff),
  2528. BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
  2529. BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
  2530. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2531. BPF_STMT(BPF_LD | BPF_IMM, 0xf),
  2532. BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
  2533. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2534. BPF_STMT(BPF_RET | BPF_A, 0)
  2535. },
  2536. CLASSIC | FLAG_NO_DATA,
  2537. { },
  2538. { { 0, 0x800000ff }, { 1, 0x800000ff } },
  2539. },
  2540. {
  2541. "LD_IMM_0",
  2542. .u.insns = {
  2543. BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
  2544. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
  2545. BPF_STMT(BPF_RET | BPF_K, 0),
  2546. BPF_STMT(BPF_RET | BPF_K, 1),
  2547. },
  2548. CLASSIC,
  2549. { },
  2550. { { 1, 1 } },
  2551. },
  2552. {
  2553. "LD_IND",
  2554. .u.insns = {
  2555. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2556. BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
  2557. BPF_STMT(BPF_RET | BPF_K, 1)
  2558. },
  2559. CLASSIC,
  2560. { },
  2561. { { 1, 0 }, { 10, 0 }, { 60, 0 } },
  2562. },
  2563. {
  2564. "LD_ABS",
  2565. .u.insns = {
  2566. BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
  2567. BPF_STMT(BPF_RET | BPF_K, 1)
  2568. },
  2569. CLASSIC,
  2570. { },
  2571. { { 1, 0 }, { 10, 0 }, { 60, 0 } },
  2572. },
  2573. {
  2574. "LD_ABS_LL",
  2575. .u.insns = {
  2576. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
  2577. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2578. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
  2579. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2580. BPF_STMT(BPF_RET | BPF_A, 0)
  2581. },
  2582. CLASSIC,
  2583. { 1, 2, 3 },
  2584. { { 1, 0 }, { 2, 3 } },
  2585. },
  2586. {
  2587. "LD_IND_LL",
  2588. .u.insns = {
  2589. BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
  2590. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2591. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2592. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2593. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
  2594. BPF_STMT(BPF_RET | BPF_A, 0)
  2595. },
  2596. CLASSIC,
  2597. { 1, 2, 3, 0xff },
  2598. { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
  2599. },
  2600. {
  2601. "LD_ABS_NET",
  2602. .u.insns = {
  2603. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
  2604. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2605. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
  2606. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2607. BPF_STMT(BPF_RET | BPF_A, 0)
  2608. },
  2609. CLASSIC,
  2610. { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
  2611. { { 15, 0 }, { 16, 3 } },
  2612. },
  2613. {
  2614. "LD_IND_NET",
  2615. .u.insns = {
  2616. BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
  2617. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2618. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  2619. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2620. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
  2621. BPF_STMT(BPF_RET | BPF_A, 0)
  2622. },
  2623. CLASSIC,
  2624. { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
  2625. { { 14, 0 }, { 15, 1 }, { 17, 3 } },
  2626. },
  2627. {
  2628. "LD_PKTTYPE",
  2629. .u.insns = {
  2630. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2631. SKF_AD_OFF + SKF_AD_PKTTYPE),
  2632. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  2633. BPF_STMT(BPF_RET | BPF_K, 1),
  2634. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2635. SKF_AD_OFF + SKF_AD_PKTTYPE),
  2636. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  2637. BPF_STMT(BPF_RET | BPF_K, 1),
  2638. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2639. SKF_AD_OFF + SKF_AD_PKTTYPE),
  2640. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  2641. BPF_STMT(BPF_RET | BPF_K, 1),
  2642. BPF_STMT(BPF_RET | BPF_A, 0)
  2643. },
  2644. CLASSIC,
  2645. { },
  2646. { { 1, 3 }, { 10, 3 } },
  2647. },
  2648. {
  2649. "LD_MARK",
  2650. .u.insns = {
  2651. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2652. SKF_AD_OFF + SKF_AD_MARK),
  2653. BPF_STMT(BPF_RET | BPF_A, 0)
  2654. },
  2655. CLASSIC,
  2656. { },
  2657. { { 1, SKB_MARK}, { 10, SKB_MARK} },
  2658. },
  2659. {
  2660. "LD_RXHASH",
  2661. .u.insns = {
  2662. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2663. SKF_AD_OFF + SKF_AD_RXHASH),
  2664. BPF_STMT(BPF_RET | BPF_A, 0)
  2665. },
  2666. CLASSIC,
  2667. { },
  2668. { { 1, SKB_HASH}, { 10, SKB_HASH} },
  2669. },
  2670. {
  2671. "LD_QUEUE",
  2672. .u.insns = {
  2673. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2674. SKF_AD_OFF + SKF_AD_QUEUE),
  2675. BPF_STMT(BPF_RET | BPF_A, 0)
  2676. },
  2677. CLASSIC,
  2678. { },
  2679. { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
  2680. },
  2681. {
  2682. "LD_PROTOCOL",
  2683. .u.insns = {
  2684. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
  2685. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
  2686. BPF_STMT(BPF_RET | BPF_K, 0),
  2687. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2688. SKF_AD_OFF + SKF_AD_PROTOCOL),
  2689. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2690. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  2691. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
  2692. BPF_STMT(BPF_RET | BPF_K, 0),
  2693. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  2694. BPF_STMT(BPF_RET | BPF_A, 0)
  2695. },
  2696. CLASSIC,
  2697. { 10, 20, 30 },
  2698. { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
  2699. },
  2700. {
  2701. "LD_VLAN_TAG",
  2702. .u.insns = {
  2703. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2704. SKF_AD_OFF + SKF_AD_VLAN_TAG),
  2705. BPF_STMT(BPF_RET | BPF_A, 0)
  2706. },
  2707. CLASSIC,
  2708. { },
  2709. {
  2710. { 1, SKB_VLAN_TCI },
  2711. { 10, SKB_VLAN_TCI }
  2712. },
  2713. },
  2714. {
  2715. "LD_VLAN_TAG_PRESENT",
  2716. .u.insns = {
  2717. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2718. SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
  2719. BPF_STMT(BPF_RET | BPF_A, 0)
  2720. },
  2721. CLASSIC,
  2722. { },
  2723. {
  2724. { 1, SKB_VLAN_PRESENT },
  2725. { 10, SKB_VLAN_PRESENT }
  2726. },
  2727. },
  2728. {
  2729. "LD_IFINDEX",
  2730. .u.insns = {
  2731. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2732. SKF_AD_OFF + SKF_AD_IFINDEX),
  2733. BPF_STMT(BPF_RET | BPF_A, 0)
  2734. },
  2735. CLASSIC,
  2736. { },
  2737. { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
  2738. },
  2739. {
  2740. "LD_HATYPE",
  2741. .u.insns = {
  2742. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2743. SKF_AD_OFF + SKF_AD_HATYPE),
  2744. BPF_STMT(BPF_RET | BPF_A, 0)
  2745. },
  2746. CLASSIC,
  2747. { },
  2748. { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
  2749. },
  2750. {
  2751. "LD_CPU",
  2752. .u.insns = {
  2753. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2754. SKF_AD_OFF + SKF_AD_CPU),
  2755. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2756. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2757. SKF_AD_OFF + SKF_AD_CPU),
  2758. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  2759. BPF_STMT(BPF_RET | BPF_A, 0)
  2760. },
  2761. CLASSIC,
  2762. { },
  2763. { { 1, 0 }, { 10, 0 } },
  2764. },
  2765. {
  2766. "LD_NLATTR",
  2767. .u.insns = {
  2768. BPF_STMT(BPF_LDX | BPF_IMM, 2),
  2769. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  2770. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  2771. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2772. SKF_AD_OFF + SKF_AD_NLATTR),
  2773. BPF_STMT(BPF_RET | BPF_A, 0)
  2774. },
  2775. CLASSIC,
  2776. #ifdef __BIG_ENDIAN
  2777. { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
  2778. #else
  2779. { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
  2780. #endif
  2781. { { 4, 0 }, { 20, 6 } },
  2782. },
  2783. {
  2784. "LD_NLATTR_NEST",
  2785. .u.insns = {
  2786. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2787. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  2788. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2789. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2790. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2791. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2792. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2793. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2794. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2795. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2796. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2797. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2798. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2799. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2800. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2801. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2802. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2803. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2804. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2805. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2806. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2807. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2808. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2809. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2810. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  2811. BPF_STMT(BPF_RET | BPF_A, 0)
  2812. },
  2813. CLASSIC,
  2814. #ifdef __BIG_ENDIAN
  2815. { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
  2816. #else
  2817. { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
  2818. #endif
  2819. { { 4, 0 }, { 20, 10 } },
  2820. },
  2821. {
  2822. "LD_PAYLOAD_OFF",
  2823. .u.insns = {
  2824. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2825. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  2826. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2827. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  2828. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2829. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  2830. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2831. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  2832. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2833. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  2834. BPF_STMT(BPF_RET | BPF_A, 0)
  2835. },
  2836. CLASSIC,
  2837. /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
  2838. * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
  2839. * id 9737, seq 1, length 64
  2840. */
  2841. { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2842. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2843. 0x08, 0x00,
  2844. 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
  2845. 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
  2846. { { 30, 0 }, { 100, 42 } },
  2847. },
  2848. {
  2849. "LD_ANC_XOR",
  2850. .u.insns = {
  2851. BPF_STMT(BPF_LD | BPF_IMM, 10),
  2852. BPF_STMT(BPF_LDX | BPF_IMM, 300),
  2853. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  2854. SKF_AD_OFF + SKF_AD_ALU_XOR_X),
  2855. BPF_STMT(BPF_RET | BPF_A, 0)
  2856. },
  2857. CLASSIC,
  2858. { },
  2859. { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
  2860. },
  2861. {
  2862. "SPILL_FILL",
  2863. .u.insns = {
  2864. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2865. BPF_STMT(BPF_LD | BPF_IMM, 2),
  2866. BPF_STMT(BPF_ALU | BPF_RSH, 1),
  2867. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  2868. BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
  2869. BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
  2870. BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
  2871. BPF_STMT(BPF_STX, 15), /* M3 = len */
  2872. BPF_STMT(BPF_LDX | BPF_MEM, 1),
  2873. BPF_STMT(BPF_LD | BPF_MEM, 2),
  2874. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  2875. BPF_STMT(BPF_LDX | BPF_MEM, 15),
  2876. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  2877. BPF_STMT(BPF_RET | BPF_A, 0)
  2878. },
  2879. CLASSIC,
  2880. { },
  2881. { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
  2882. },
  2883. {
  2884. "JEQ",
  2885. .u.insns = {
  2886. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2887. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  2888. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
  2889. BPF_STMT(BPF_RET | BPF_K, 1),
  2890. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2891. },
  2892. CLASSIC,
  2893. { 3, 3, 3, 3, 3 },
  2894. { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
  2895. },
  2896. {
  2897. "JGT",
  2898. .u.insns = {
  2899. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2900. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  2901. BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
  2902. BPF_STMT(BPF_RET | BPF_K, 1),
  2903. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2904. },
  2905. CLASSIC,
  2906. { 4, 4, 4, 3, 3 },
  2907. { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
  2908. },
  2909. {
  2910. "JGE (jt 0), test 1",
  2911. .u.insns = {
  2912. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2913. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  2914. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
  2915. BPF_STMT(BPF_RET | BPF_K, 1),
  2916. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2917. },
  2918. CLASSIC,
  2919. { 4, 4, 4, 3, 3 },
  2920. { { 2, 0 }, { 3, 1 }, { 4, 1 } },
  2921. },
  2922. {
  2923. "JGE (jt 0), test 2",
  2924. .u.insns = {
  2925. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2926. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  2927. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
  2928. BPF_STMT(BPF_RET | BPF_K, 1),
  2929. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2930. },
  2931. CLASSIC,
  2932. { 4, 4, 5, 3, 3 },
  2933. { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
  2934. },
  2935. {
  2936. "JGE",
  2937. .u.insns = {
  2938. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2939. BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
  2940. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
  2941. BPF_STMT(BPF_RET | BPF_K, 10),
  2942. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
  2943. BPF_STMT(BPF_RET | BPF_K, 20),
  2944. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
  2945. BPF_STMT(BPF_RET | BPF_K, 30),
  2946. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
  2947. BPF_STMT(BPF_RET | BPF_K, 40),
  2948. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2949. },
  2950. CLASSIC,
  2951. { 1, 2, 3, 4, 5 },
  2952. { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
  2953. },
  2954. {
  2955. "JSET",
  2956. .u.insns = {
  2957. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  2958. BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
  2959. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  2960. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  2961. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  2962. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  2963. BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
  2964. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  2965. BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
  2966. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
  2967. BPF_STMT(BPF_RET | BPF_K, 10),
  2968. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
  2969. BPF_STMT(BPF_RET | BPF_K, 20),
  2970. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  2971. BPF_STMT(BPF_RET | BPF_K, 30),
  2972. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  2973. BPF_STMT(BPF_RET | BPF_K, 30),
  2974. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  2975. BPF_STMT(BPF_RET | BPF_K, 30),
  2976. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  2977. BPF_STMT(BPF_RET | BPF_K, 30),
  2978. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  2979. BPF_STMT(BPF_RET | BPF_K, 30),
  2980. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  2981. },
  2982. CLASSIC,
  2983. { 0, 0xAA, 0x55, 1 },
  2984. { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
  2985. },
  2986. {
  2987. "tcpdump port 22",
  2988. .u.insns = {
  2989. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
  2990. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
  2991. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
  2992. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
  2993. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
  2994. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
  2995. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
  2996. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
  2997. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
  2998. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
  2999. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
  3000. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
  3001. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
  3002. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
  3003. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
  3004. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
  3005. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
  3006. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
  3007. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
  3008. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
  3009. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
  3010. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
  3011. BPF_STMT(BPF_RET | BPF_K, 0xffff),
  3012. BPF_STMT(BPF_RET | BPF_K, 0),
  3013. },
  3014. CLASSIC,
  3015. /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
  3016. * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
  3017. * seq 1305692979:1305693027, ack 3650467037, win 65535,
  3018. * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
  3019. */
  3020. { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
  3021. 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
  3022. 0x08, 0x00,
  3023. 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
  3024. 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
  3025. 0x0a, 0x01, 0x01, 0x95, /* ip src */
  3026. 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
  3027. 0xc2, 0x24,
  3028. 0x00, 0x16 /* dst port */ },
  3029. { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
  3030. },
  3031. {
  3032. "tcpdump complex",
  3033. .u.insns = {
  3034. /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
  3035. * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
  3036. * (len > 115 or len < 30000000000)' -d
  3037. */
  3038. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
  3039. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
  3040. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
  3041. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
  3042. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
  3043. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
  3044. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
  3045. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
  3046. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
  3047. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
  3048. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
  3049. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
  3050. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
  3051. BPF_STMT(BPF_ST, 1),
  3052. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
  3053. BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
  3054. BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
  3055. BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
  3056. BPF_STMT(BPF_LD | BPF_MEM, 1),
  3057. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  3058. BPF_STMT(BPF_ST, 5),
  3059. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
  3060. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
  3061. BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
  3062. BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
  3063. BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
  3064. BPF_STMT(BPF_LD | BPF_MEM, 5),
  3065. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
  3066. BPF_STMT(BPF_LD | BPF_LEN, 0),
  3067. BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
  3068. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
  3069. BPF_STMT(BPF_RET | BPF_K, 0xffff),
  3070. BPF_STMT(BPF_RET | BPF_K, 0),
  3071. },
  3072. CLASSIC,
  3073. { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
  3074. 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
  3075. 0x08, 0x00,
  3076. 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
  3077. 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
  3078. 0x0a, 0x01, 0x01, 0x95, /* ip src */
  3079. 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
  3080. 0xc2, 0x24,
  3081. 0x00, 0x16 /* dst port */ },
  3082. { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
  3083. },
  3084. {
  3085. "RET_A",
  3086. .u.insns = {
  3087. /* check that uninitialized X and A contain zeros */
  3088. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  3089. BPF_STMT(BPF_RET | BPF_A, 0)
  3090. },
  3091. CLASSIC,
  3092. { },
  3093. { {1, 0}, {2, 0} },
  3094. },
  3095. {
  3096. "INT: ADD trivial",
  3097. .u.insns_int = {
  3098. BPF_ALU64_IMM(BPF_MOV, R1, 1),
  3099. BPF_ALU64_IMM(BPF_ADD, R1, 2),
  3100. BPF_ALU64_IMM(BPF_MOV, R2, 3),
  3101. BPF_ALU64_REG(BPF_SUB, R1, R2),
  3102. BPF_ALU64_IMM(BPF_ADD, R1, -1),
  3103. BPF_ALU64_IMM(BPF_MUL, R1, 3),
  3104. BPF_ALU64_REG(BPF_MOV, R0, R1),
  3105. BPF_EXIT_INSN(),
  3106. },
  3107. INTERNAL,
  3108. { },
  3109. { { 0, 0xfffffffd } }
  3110. },
  3111. {
  3112. "INT: MUL_X",
  3113. .u.insns_int = {
  3114. BPF_ALU64_IMM(BPF_MOV, R0, -1),
  3115. BPF_ALU64_IMM(BPF_MOV, R1, -1),
  3116. BPF_ALU64_IMM(BPF_MOV, R2, 3),
  3117. BPF_ALU64_REG(BPF_MUL, R1, R2),
  3118. BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
  3119. BPF_EXIT_INSN(),
  3120. BPF_ALU64_IMM(BPF_MOV, R0, 1),
  3121. BPF_EXIT_INSN(),
  3122. },
  3123. INTERNAL,
  3124. { },
  3125. { { 0, 1 } }
  3126. },
  3127. {
  3128. "INT: MUL_X2",
  3129. .u.insns_int = {
  3130. BPF_ALU32_IMM(BPF_MOV, R0, -1),
  3131. BPF_ALU32_IMM(BPF_MOV, R1, -1),
  3132. BPF_ALU32_IMM(BPF_MOV, R2, 3),
  3133. BPF_ALU64_REG(BPF_MUL, R1, R2),
  3134. BPF_ALU64_IMM(BPF_RSH, R1, 8),
  3135. BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
  3136. BPF_EXIT_INSN(),
  3137. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  3138. BPF_EXIT_INSN(),
  3139. },
  3140. INTERNAL,
  3141. { },
  3142. { { 0, 1 } }
  3143. },
  3144. {
  3145. "INT: MUL32_X",
  3146. .u.insns_int = {
  3147. BPF_ALU32_IMM(BPF_MOV, R0, -1),
  3148. BPF_ALU64_IMM(BPF_MOV, R1, -1),
  3149. BPF_ALU32_IMM(BPF_MOV, R2, 3),
  3150. BPF_ALU32_REG(BPF_MUL, R1, R2),
  3151. BPF_ALU64_IMM(BPF_RSH, R1, 8),
  3152. BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
  3153. BPF_EXIT_INSN(),
  3154. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  3155. BPF_EXIT_INSN(),
  3156. },
  3157. INTERNAL,
  3158. { },
  3159. { { 0, 1 } }
  3160. },
  3161. {
  3162. /* Have to test all register combinations, since
  3163. * JITing of different registers will produce
  3164. * different asm code.
  3165. */
  3166. "INT: ADD 64-bit",
  3167. .u.insns_int = {
  3168. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  3169. BPF_ALU64_IMM(BPF_MOV, R1, 1),
  3170. BPF_ALU64_IMM(BPF_MOV, R2, 2),
  3171. BPF_ALU64_IMM(BPF_MOV, R3, 3),
  3172. BPF_ALU64_IMM(BPF_MOV, R4, 4),
  3173. BPF_ALU64_IMM(BPF_MOV, R5, 5),
  3174. BPF_ALU64_IMM(BPF_MOV, R6, 6),
  3175. BPF_ALU64_IMM(BPF_MOV, R7, 7),
  3176. BPF_ALU64_IMM(BPF_MOV, R8, 8),
  3177. BPF_ALU64_IMM(BPF_MOV, R9, 9),
  3178. BPF_ALU64_IMM(BPF_ADD, R0, 20),
  3179. BPF_ALU64_IMM(BPF_ADD, R1, 20),
  3180. BPF_ALU64_IMM(BPF_ADD, R2, 20),
  3181. BPF_ALU64_IMM(BPF_ADD, R3, 20),
  3182. BPF_ALU64_IMM(BPF_ADD, R4, 20),
  3183. BPF_ALU64_IMM(BPF_ADD, R5, 20),
  3184. BPF_ALU64_IMM(BPF_ADD, R6, 20),
  3185. BPF_ALU64_IMM(BPF_ADD, R7, 20),
  3186. BPF_ALU64_IMM(BPF_ADD, R8, 20),
  3187. BPF_ALU64_IMM(BPF_ADD, R9, 20),
  3188. BPF_ALU64_IMM(BPF_SUB, R0, 10),
  3189. BPF_ALU64_IMM(BPF_SUB, R1, 10),
  3190. BPF_ALU64_IMM(BPF_SUB, R2, 10),
  3191. BPF_ALU64_IMM(BPF_SUB, R3, 10),
  3192. BPF_ALU64_IMM(BPF_SUB, R4, 10),
  3193. BPF_ALU64_IMM(BPF_SUB, R5, 10),
  3194. BPF_ALU64_IMM(BPF_SUB, R6, 10),
  3195. BPF_ALU64_IMM(BPF_SUB, R7, 10),
  3196. BPF_ALU64_IMM(BPF_SUB, R8, 10),
  3197. BPF_ALU64_IMM(BPF_SUB, R9, 10),
  3198. BPF_ALU64_REG(BPF_ADD, R0, R0),
  3199. BPF_ALU64_REG(BPF_ADD, R0, R1),
  3200. BPF_ALU64_REG(BPF_ADD, R0, R2),
  3201. BPF_ALU64_REG(BPF_ADD, R0, R3),
  3202. BPF_ALU64_REG(BPF_ADD, R0, R4),
  3203. BPF_ALU64_REG(BPF_ADD, R0, R5),
  3204. BPF_ALU64_REG(BPF_ADD, R0, R6),
  3205. BPF_ALU64_REG(BPF_ADD, R0, R7),
  3206. BPF_ALU64_REG(BPF_ADD, R0, R8),
  3207. BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
  3208. BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
  3209. BPF_EXIT_INSN(),
  3210. BPF_ALU64_REG(BPF_ADD, R1, R0),
  3211. BPF_ALU64_REG(BPF_ADD, R1, R1),
  3212. BPF_ALU64_REG(BPF_ADD, R1, R2),
  3213. BPF_ALU64_REG(BPF_ADD, R1, R3),
  3214. BPF_ALU64_REG(BPF_ADD, R1, R4),
  3215. BPF_ALU64_REG(BPF_ADD, R1, R5),
  3216. BPF_ALU64_REG(BPF_ADD, R1, R6),
  3217. BPF_ALU64_REG(BPF_ADD, R1, R7),
  3218. BPF_ALU64_REG(BPF_ADD, R1, R8),
  3219. BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
  3220. BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
  3221. BPF_EXIT_INSN(),
  3222. BPF_ALU64_REG(BPF_ADD, R2, R0),
  3223. BPF_ALU64_REG(BPF_ADD, R2, R1),
  3224. BPF_ALU64_REG(BPF_ADD, R2, R2),
  3225. BPF_ALU64_REG(BPF_ADD, R2, R3),
  3226. BPF_ALU64_REG(BPF_ADD, R2, R4),
  3227. BPF_ALU64_REG(BPF_ADD, R2, R5),
  3228. BPF_ALU64_REG(BPF_ADD, R2, R6),
  3229. BPF_ALU64_REG(BPF_ADD, R2, R7),
  3230. BPF_ALU64_REG(BPF_ADD, R2, R8),
  3231. BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
  3232. BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
  3233. BPF_EXIT_INSN(),
  3234. BPF_ALU64_REG(BPF_ADD, R3, R0),
  3235. BPF_ALU64_REG(BPF_ADD, R3, R1),
  3236. BPF_ALU64_REG(BPF_ADD, R3, R2),
  3237. BPF_ALU64_REG(BPF_ADD, R3, R3),
  3238. BPF_ALU64_REG(BPF_ADD, R3, R4),
  3239. BPF_ALU64_REG(BPF_ADD, R3, R5),
  3240. BPF_ALU64_REG(BPF_ADD, R3, R6),
  3241. BPF_ALU64_REG(BPF_ADD, R3, R7),
  3242. BPF_ALU64_REG(BPF_ADD, R3, R8),
  3243. BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
  3244. BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
  3245. BPF_EXIT_INSN(),
  3246. BPF_ALU64_REG(BPF_ADD, R4, R0),
  3247. BPF_ALU64_REG(BPF_ADD, R4, R1),
  3248. BPF_ALU64_REG(BPF_ADD, R4, R2),
  3249. BPF_ALU64_REG(BPF_ADD, R4, R3),
  3250. BPF_ALU64_REG(BPF_ADD, R4, R4),
  3251. BPF_ALU64_REG(BPF_ADD, R4, R5),
  3252. BPF_ALU64_REG(BPF_ADD, R4, R6),
  3253. BPF_ALU64_REG(BPF_ADD, R4, R7),
  3254. BPF_ALU64_REG(BPF_ADD, R4, R8),
  3255. BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
  3256. BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
  3257. BPF_EXIT_INSN(),
  3258. BPF_ALU64_REG(BPF_ADD, R5, R0),
  3259. BPF_ALU64_REG(BPF_ADD, R5, R1),
  3260. BPF_ALU64_REG(BPF_ADD, R5, R2),
  3261. BPF_ALU64_REG(BPF_ADD, R5, R3),
  3262. BPF_ALU64_REG(BPF_ADD, R5, R4),
  3263. BPF_ALU64_REG(BPF_ADD, R5, R5),
  3264. BPF_ALU64_REG(BPF_ADD, R5, R6),
  3265. BPF_ALU64_REG(BPF_ADD, R5, R7),
  3266. BPF_ALU64_REG(BPF_ADD, R5, R8),
  3267. BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
  3268. BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
  3269. BPF_EXIT_INSN(),
  3270. BPF_ALU64_REG(BPF_ADD, R6, R0),
  3271. BPF_ALU64_REG(BPF_ADD, R6, R1),
  3272. BPF_ALU64_REG(BPF_ADD, R6, R2),
  3273. BPF_ALU64_REG(BPF_ADD, R6, R3),
  3274. BPF_ALU64_REG(BPF_ADD, R6, R4),
  3275. BPF_ALU64_REG(BPF_ADD, R6, R5),
  3276. BPF_ALU64_REG(BPF_ADD, R6, R6),
  3277. BPF_ALU64_REG(BPF_ADD, R6, R7),
  3278. BPF_ALU64_REG(BPF_ADD, R6, R8),
  3279. BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
  3280. BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
  3281. BPF_EXIT_INSN(),
  3282. BPF_ALU64_REG(BPF_ADD, R7, R0),
  3283. BPF_ALU64_REG(BPF_ADD, R7, R1),
  3284. BPF_ALU64_REG(BPF_ADD, R7, R2),
  3285. BPF_ALU64_REG(BPF_ADD, R7, R3),
  3286. BPF_ALU64_REG(BPF_ADD, R7, R4),
  3287. BPF_ALU64_REG(BPF_ADD, R7, R5),
  3288. BPF_ALU64_REG(BPF_ADD, R7, R6),
  3289. BPF_ALU64_REG(BPF_ADD, R7, R7),
  3290. BPF_ALU64_REG(BPF_ADD, R7, R8),
  3291. BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
  3292. BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
  3293. BPF_EXIT_INSN(),
  3294. BPF_ALU64_REG(BPF_ADD, R8, R0),
  3295. BPF_ALU64_REG(BPF_ADD, R8, R1),
  3296. BPF_ALU64_REG(BPF_ADD, R8, R2),
  3297. BPF_ALU64_REG(BPF_ADD, R8, R3),
  3298. BPF_ALU64_REG(BPF_ADD, R8, R4),
  3299. BPF_ALU64_REG(BPF_ADD, R8, R5),
  3300. BPF_ALU64_REG(BPF_ADD, R8, R6),
  3301. BPF_ALU64_REG(BPF_ADD, R8, R7),
  3302. BPF_ALU64_REG(BPF_ADD, R8, R8),
  3303. BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
  3304. BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
  3305. BPF_EXIT_INSN(),
  3306. BPF_ALU64_REG(BPF_ADD, R9, R0),
  3307. BPF_ALU64_REG(BPF_ADD, R9, R1),
  3308. BPF_ALU64_REG(BPF_ADD, R9, R2),
  3309. BPF_ALU64_REG(BPF_ADD, R9, R3),
  3310. BPF_ALU64_REG(BPF_ADD, R9, R4),
  3311. BPF_ALU64_REG(BPF_ADD, R9, R5),
  3312. BPF_ALU64_REG(BPF_ADD, R9, R6),
  3313. BPF_ALU64_REG(BPF_ADD, R9, R7),
  3314. BPF_ALU64_REG(BPF_ADD, R9, R8),
  3315. BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
  3316. BPF_ALU64_REG(BPF_MOV, R0, R9),
  3317. BPF_EXIT_INSN(),
  3318. },
  3319. INTERNAL,
  3320. { },
  3321. { { 0, 2957380 } }
  3322. },
  3323. {
  3324. "INT: ADD 32-bit",
  3325. .u.insns_int = {
  3326. BPF_ALU32_IMM(BPF_MOV, R0, 20),
  3327. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  3328. BPF_ALU32_IMM(BPF_MOV, R2, 2),
  3329. BPF_ALU32_IMM(BPF_MOV, R3, 3),
  3330. BPF_ALU32_IMM(BPF_MOV, R4, 4),
  3331. BPF_ALU32_IMM(BPF_MOV, R5, 5),
  3332. BPF_ALU32_IMM(BPF_MOV, R6, 6),
  3333. BPF_ALU32_IMM(BPF_MOV, R7, 7),
  3334. BPF_ALU32_IMM(BPF_MOV, R8, 8),
  3335. BPF_ALU32_IMM(BPF_MOV, R9, 9),
  3336. BPF_ALU64_IMM(BPF_ADD, R1, 10),
  3337. BPF_ALU64_IMM(BPF_ADD, R2, 10),
  3338. BPF_ALU64_IMM(BPF_ADD, R3, 10),
  3339. BPF_ALU64_IMM(BPF_ADD, R4, 10),
  3340. BPF_ALU64_IMM(BPF_ADD, R5, 10),
  3341. BPF_ALU64_IMM(BPF_ADD, R6, 10),
  3342. BPF_ALU64_IMM(BPF_ADD, R7, 10),
  3343. BPF_ALU64_IMM(BPF_ADD, R8, 10),
  3344. BPF_ALU64_IMM(BPF_ADD, R9, 10),
  3345. BPF_ALU32_REG(BPF_ADD, R0, R1),
  3346. BPF_ALU32_REG(BPF_ADD, R0, R2),
  3347. BPF_ALU32_REG(BPF_ADD, R0, R3),
  3348. BPF_ALU32_REG(BPF_ADD, R0, R4),
  3349. BPF_ALU32_REG(BPF_ADD, R0, R5),
  3350. BPF_ALU32_REG(BPF_ADD, R0, R6),
  3351. BPF_ALU32_REG(BPF_ADD, R0, R7),
  3352. BPF_ALU32_REG(BPF_ADD, R0, R8),
  3353. BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
  3354. BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
  3355. BPF_EXIT_INSN(),
  3356. BPF_ALU32_REG(BPF_ADD, R1, R0),
  3357. BPF_ALU32_REG(BPF_ADD, R1, R1),
  3358. BPF_ALU32_REG(BPF_ADD, R1, R2),
  3359. BPF_ALU32_REG(BPF_ADD, R1, R3),
  3360. BPF_ALU32_REG(BPF_ADD, R1, R4),
  3361. BPF_ALU32_REG(BPF_ADD, R1, R5),
  3362. BPF_ALU32_REG(BPF_ADD, R1, R6),
  3363. BPF_ALU32_REG(BPF_ADD, R1, R7),
  3364. BPF_ALU32_REG(BPF_ADD, R1, R8),
  3365. BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
  3366. BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
  3367. BPF_EXIT_INSN(),
  3368. BPF_ALU32_REG(BPF_ADD, R2, R0),
  3369. BPF_ALU32_REG(BPF_ADD, R2, R1),
  3370. BPF_ALU32_REG(BPF_ADD, R2, R2),
  3371. BPF_ALU32_REG(BPF_ADD, R2, R3),
  3372. BPF_ALU32_REG(BPF_ADD, R2, R4),
  3373. BPF_ALU32_REG(BPF_ADD, R2, R5),
  3374. BPF_ALU32_REG(BPF_ADD, R2, R6),
  3375. BPF_ALU32_REG(BPF_ADD, R2, R7),
  3376. BPF_ALU32_REG(BPF_ADD, R2, R8),
  3377. BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
  3378. BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
  3379. BPF_EXIT_INSN(),
  3380. BPF_ALU32_REG(BPF_ADD, R3, R0),
  3381. BPF_ALU32_REG(BPF_ADD, R3, R1),
  3382. BPF_ALU32_REG(BPF_ADD, R3, R2),
  3383. BPF_ALU32_REG(BPF_ADD, R3, R3),
  3384. BPF_ALU32_REG(BPF_ADD, R3, R4),
  3385. BPF_ALU32_REG(BPF_ADD, R3, R5),
  3386. BPF_ALU32_REG(BPF_ADD, R3, R6),
  3387. BPF_ALU32_REG(BPF_ADD, R3, R7),
  3388. BPF_ALU32_REG(BPF_ADD, R3, R8),
  3389. BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
  3390. BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
  3391. BPF_EXIT_INSN(),
  3392. BPF_ALU32_REG(BPF_ADD, R4, R0),
  3393. BPF_ALU32_REG(BPF_ADD, R4, R1),
  3394. BPF_ALU32_REG(BPF_ADD, R4, R2),
  3395. BPF_ALU32_REG(BPF_ADD, R4, R3),
  3396. BPF_ALU32_REG(BPF_ADD, R4, R4),
  3397. BPF_ALU32_REG(BPF_ADD, R4, R5),
  3398. BPF_ALU32_REG(BPF_ADD, R4, R6),
  3399. BPF_ALU32_REG(BPF_ADD, R4, R7),
  3400. BPF_ALU32_REG(BPF_ADD, R4, R8),
  3401. BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
  3402. BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
  3403. BPF_EXIT_INSN(),
  3404. BPF_ALU32_REG(BPF_ADD, R5, R0),
  3405. BPF_ALU32_REG(BPF_ADD, R5, R1),
  3406. BPF_ALU32_REG(BPF_ADD, R5, R2),
  3407. BPF_ALU32_REG(BPF_ADD, R5, R3),
  3408. BPF_ALU32_REG(BPF_ADD, R5, R4),
  3409. BPF_ALU32_REG(BPF_ADD, R5, R5),
  3410. BPF_ALU32_REG(BPF_ADD, R5, R6),
  3411. BPF_ALU32_REG(BPF_ADD, R5, R7),
  3412. BPF_ALU32_REG(BPF_ADD, R5, R8),
  3413. BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
  3414. BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
  3415. BPF_EXIT_INSN(),
  3416. BPF_ALU32_REG(BPF_ADD, R6, R0),
  3417. BPF_ALU32_REG(BPF_ADD, R6, R1),
  3418. BPF_ALU32_REG(BPF_ADD, R6, R2),
  3419. BPF_ALU32_REG(BPF_ADD, R6, R3),
  3420. BPF_ALU32_REG(BPF_ADD, R6, R4),
  3421. BPF_ALU32_REG(BPF_ADD, R6, R5),
  3422. BPF_ALU32_REG(BPF_ADD, R6, R6),
  3423. BPF_ALU32_REG(BPF_ADD, R6, R7),
  3424. BPF_ALU32_REG(BPF_ADD, R6, R8),
  3425. BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
  3426. BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
  3427. BPF_EXIT_INSN(),
  3428. BPF_ALU32_REG(BPF_ADD, R7, R0),
  3429. BPF_ALU32_REG(BPF_ADD, R7, R1),
  3430. BPF_ALU32_REG(BPF_ADD, R7, R2),
  3431. BPF_ALU32_REG(BPF_ADD, R7, R3),
  3432. BPF_ALU32_REG(BPF_ADD, R7, R4),
  3433. BPF_ALU32_REG(BPF_ADD, R7, R5),
  3434. BPF_ALU32_REG(BPF_ADD, R7, R6),
  3435. BPF_ALU32_REG(BPF_ADD, R7, R7),
  3436. BPF_ALU32_REG(BPF_ADD, R7, R8),
  3437. BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
  3438. BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
  3439. BPF_EXIT_INSN(),
  3440. BPF_ALU32_REG(BPF_ADD, R8, R0),
  3441. BPF_ALU32_REG(BPF_ADD, R8, R1),
  3442. BPF_ALU32_REG(BPF_ADD, R8, R2),
  3443. BPF_ALU32_REG(BPF_ADD, R8, R3),
  3444. BPF_ALU32_REG(BPF_ADD, R8, R4),
  3445. BPF_ALU32_REG(BPF_ADD, R8, R5),
  3446. BPF_ALU32_REG(BPF_ADD, R8, R6),
  3447. BPF_ALU32_REG(BPF_ADD, R8, R7),
  3448. BPF_ALU32_REG(BPF_ADD, R8, R8),
  3449. BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
  3450. BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
  3451. BPF_EXIT_INSN(),
  3452. BPF_ALU32_REG(BPF_ADD, R9, R0),
  3453. BPF_ALU32_REG(BPF_ADD, R9, R1),
  3454. BPF_ALU32_REG(BPF_ADD, R9, R2),
  3455. BPF_ALU32_REG(BPF_ADD, R9, R3),
  3456. BPF_ALU32_REG(BPF_ADD, R9, R4),
  3457. BPF_ALU32_REG(BPF_ADD, R9, R5),
  3458. BPF_ALU32_REG(BPF_ADD, R9, R6),
  3459. BPF_ALU32_REG(BPF_ADD, R9, R7),
  3460. BPF_ALU32_REG(BPF_ADD, R9, R8),
  3461. BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
  3462. BPF_ALU32_REG(BPF_MOV, R0, R9),
  3463. BPF_EXIT_INSN(),
  3464. },
  3465. INTERNAL,
  3466. { },
  3467. { { 0, 2957380 } }
  3468. },
	{ /* Mainly checking JIT here. */
		"INT: SUB",
		/* Seed R0..R9 with 0..9, then for every register subtract all
		 * the other registers plus an immediate 10, checkpointing R0
		 * after the first round; expected final result is 11.
		 */
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			/* R0 = 0 - (1+2+...+9) - 10 = -55 */
			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R0),
			BPF_ALU64_REG(BPF_SUB, R1, R2),
			BPF_ALU64_REG(BPF_SUB, R1, R3),
			BPF_ALU64_REG(BPF_SUB, R1, R4),
			BPF_ALU64_REG(BPF_SUB, R1, R5),
			BPF_ALU64_REG(BPF_SUB, R1, R6),
			BPF_ALU64_REG(BPF_SUB, R1, R7),
			BPF_ALU64_REG(BPF_SUB, R1, R8),
			BPF_ALU64_REG(BPF_SUB, R1, R9),
			BPF_ALU64_IMM(BPF_SUB, R1, 10),
			BPF_ALU64_REG(BPF_SUB, R2, R0),
			BPF_ALU64_REG(BPF_SUB, R2, R1),
			BPF_ALU64_REG(BPF_SUB, R2, R3),
			BPF_ALU64_REG(BPF_SUB, R2, R4),
			BPF_ALU64_REG(BPF_SUB, R2, R5),
			BPF_ALU64_REG(BPF_SUB, R2, R6),
			BPF_ALU64_REG(BPF_SUB, R2, R7),
			BPF_ALU64_REG(BPF_SUB, R2, R8),
			BPF_ALU64_REG(BPF_SUB, R2, R9),
			BPF_ALU64_IMM(BPF_SUB, R2, 10),
			BPF_ALU64_REG(BPF_SUB, R3, R0),
			BPF_ALU64_REG(BPF_SUB, R3, R1),
			BPF_ALU64_REG(BPF_SUB, R3, R2),
			BPF_ALU64_REG(BPF_SUB, R3, R4),
			BPF_ALU64_REG(BPF_SUB, R3, R5),
			BPF_ALU64_REG(BPF_SUB, R3, R6),
			BPF_ALU64_REG(BPF_SUB, R3, R7),
			BPF_ALU64_REG(BPF_SUB, R3, R8),
			BPF_ALU64_REG(BPF_SUB, R3, R9),
			BPF_ALU64_IMM(BPF_SUB, R3, 10),
			BPF_ALU64_REG(BPF_SUB, R4, R0),
			BPF_ALU64_REG(BPF_SUB, R4, R1),
			BPF_ALU64_REG(BPF_SUB, R4, R2),
			BPF_ALU64_REG(BPF_SUB, R4, R3),
			BPF_ALU64_REG(BPF_SUB, R4, R5),
			BPF_ALU64_REG(BPF_SUB, R4, R6),
			BPF_ALU64_REG(BPF_SUB, R4, R7),
			BPF_ALU64_REG(BPF_SUB, R4, R8),
			BPF_ALU64_REG(BPF_SUB, R4, R9),
			BPF_ALU64_IMM(BPF_SUB, R4, 10),
			BPF_ALU64_REG(BPF_SUB, R5, R0),
			BPF_ALU64_REG(BPF_SUB, R5, R1),
			BPF_ALU64_REG(BPF_SUB, R5, R2),
			BPF_ALU64_REG(BPF_SUB, R5, R3),
			BPF_ALU64_REG(BPF_SUB, R5, R4),
			BPF_ALU64_REG(BPF_SUB, R5, R6),
			BPF_ALU64_REG(BPF_SUB, R5, R7),
			BPF_ALU64_REG(BPF_SUB, R5, R8),
			BPF_ALU64_REG(BPF_SUB, R5, R9),
			BPF_ALU64_IMM(BPF_SUB, R5, 10),
			BPF_ALU64_REG(BPF_SUB, R6, R0),
			BPF_ALU64_REG(BPF_SUB, R6, R1),
			BPF_ALU64_REG(BPF_SUB, R6, R2),
			BPF_ALU64_REG(BPF_SUB, R6, R3),
			BPF_ALU64_REG(BPF_SUB, R6, R4),
			BPF_ALU64_REG(BPF_SUB, R6, R5),
			BPF_ALU64_REG(BPF_SUB, R6, R7),
			BPF_ALU64_REG(BPF_SUB, R6, R8),
			BPF_ALU64_REG(BPF_SUB, R6, R9),
			BPF_ALU64_IMM(BPF_SUB, R6, 10),
			BPF_ALU64_REG(BPF_SUB, R7, R0),
			BPF_ALU64_REG(BPF_SUB, R7, R1),
			BPF_ALU64_REG(BPF_SUB, R7, R2),
			BPF_ALU64_REG(BPF_SUB, R7, R3),
			BPF_ALU64_REG(BPF_SUB, R7, R4),
			BPF_ALU64_REG(BPF_SUB, R7, R5),
			BPF_ALU64_REG(BPF_SUB, R7, R6),
			BPF_ALU64_REG(BPF_SUB, R7, R8),
			BPF_ALU64_REG(BPF_SUB, R7, R9),
			BPF_ALU64_IMM(BPF_SUB, R7, 10),
			BPF_ALU64_REG(BPF_SUB, R8, R0),
			BPF_ALU64_REG(BPF_SUB, R8, R1),
			BPF_ALU64_REG(BPF_SUB, R8, R2),
			BPF_ALU64_REG(BPF_SUB, R8, R3),
			BPF_ALU64_REG(BPF_SUB, R8, R4),
			BPF_ALU64_REG(BPF_SUB, R8, R5),
			BPF_ALU64_REG(BPF_SUB, R8, R6),
			BPF_ALU64_REG(BPF_SUB, R8, R7),
			BPF_ALU64_REG(BPF_SUB, R8, R9),
			BPF_ALU64_IMM(BPF_SUB, R8, 10),
			BPF_ALU64_REG(BPF_SUB, R9, R0),
			BPF_ALU64_REG(BPF_SUB, R9, R1),
			BPF_ALU64_REG(BPF_SUB, R9, R2),
			BPF_ALU64_REG(BPF_SUB, R9, R3),
			BPF_ALU64_REG(BPF_SUB, R9, R4),
			BPF_ALU64_REG(BPF_SUB, R9, R5),
			BPF_ALU64_REG(BPF_SUB, R9, R6),
			BPF_ALU64_REG(BPF_SUB, R9, R7),
			BPF_ALU64_REG(BPF_SUB, R9, R8),
			BPF_ALU64_IMM(BPF_SUB, R9, 10),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			/* Negate R0, then fold all other registers back in */
			BPF_ALU64_IMM(BPF_NEG, R0, 0),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 11 } }
	},
	{ /* Mainly checking JIT here. */
		"INT: XOR",
		/* Verify that both "SUB r, r" and "XOR r, r" zero a register,
		 * pairwise for every register; interleaved MOVs perturb other
		 * registers so the JIT cannot rely on stale values. Returns 1
		 * only if every zeroed pair compares equal.
		 */
		.u.insns_int = {
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_XOR, R1, R1),
			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R2, R2),
			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R2, R2),
			BPF_ALU64_REG(BPF_XOR, R3, R3),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R3, R3),
			BPF_ALU64_REG(BPF_XOR, R4, R4),
			BPF_ALU64_IMM(BPF_MOV, R2, 1),
			BPF_ALU64_IMM(BPF_MOV, R5, -1),
			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R4, R4),
			BPF_ALU64_REG(BPF_XOR, R5, R5),
			BPF_ALU64_IMM(BPF_MOV, R3, 1),
			BPF_ALU64_IMM(BPF_MOV, R7, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R5, 1),
			BPF_ALU64_REG(BPF_SUB, R5, R5),
			BPF_ALU64_REG(BPF_XOR, R6, R6),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R8, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R6, R6),
			BPF_ALU64_REG(BPF_XOR, R7, R7),
			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R7, R7),
			BPF_ALU64_REG(BPF_XOR, R8, R8),
			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R8, R8),
			BPF_ALU64_REG(BPF_XOR, R9, R9),
			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R9, R9),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			/* Final check: success path sets R0 = 1 */
			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{ /* Mainly checking JIT here. */
		"INT: MUL",
		/* Chain 64-bit multiplies across all registers; the first
		 * product fits in 32 bits (439084800), the later products
		 * overflow into the upper 32 bits, which are extracted via
		 * shifts and compared against the expected halves.
		 */
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 11),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_MUL, R0, R0),
			BPF_ALU64_REG(BPF_MUL, R0, R1),
			BPF_ALU64_REG(BPF_MUL, R0, R2),
			BPF_ALU64_REG(BPF_MUL, R0, R3),
			BPF_ALU64_REG(BPF_MUL, R0, R4),
			BPF_ALU64_REG(BPF_MUL, R0, R5),
			BPF_ALU64_REG(BPF_MUL, R0, R6),
			BPF_ALU64_REG(BPF_MUL, R0, R7),
			BPF_ALU64_REG(BPF_MUL, R0, R8),
			BPF_ALU64_REG(BPF_MUL, R0, R9),
			BPF_ALU64_IMM(BPF_MUL, R0, 10),
			/* R0 = 11 * 11 * 2 * 3 * ... * 9 * 10 = 439084800 */
			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R1, R0),
			BPF_ALU64_REG(BPF_MUL, R1, R2),
			BPF_ALU64_REG(BPF_MUL, R1, R3),
			BPF_ALU64_REG(BPF_MUL, R1, R4),
			BPF_ALU64_REG(BPF_MUL, R1, R5),
			BPF_ALU64_REG(BPF_MUL, R1, R6),
			BPF_ALU64_REG(BPF_MUL, R1, R7),
			BPF_ALU64_REG(BPF_MUL, R1, R8),
			BPF_ALU64_REG(BPF_MUL, R1, R9),
			BPF_ALU64_IMM(BPF_MUL, R1, 10),
			/* Check high 32 bits of the overflowed product */
			BPF_ALU64_REG(BPF_MOV, R2, R1),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
			BPF_EXIT_INSN(),
			/* Check (sign-extended) low 32 bits */
			BPF_ALU64_IMM(BPF_LSH, R1, 32),
			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R2, R0),
			BPF_ALU64_REG(BPF_MUL, R2, R1),
			BPF_ALU64_REG(BPF_MUL, R2, R3),
			BPF_ALU64_REG(BPF_MUL, R2, R4),
			BPF_ALU64_REG(BPF_MUL, R2, R5),
			BPF_ALU64_REG(BPF_MUL, R2, R6),
			BPF_ALU64_REG(BPF_MUL, R2, R7),
			BPF_ALU64_REG(BPF_MUL, R2, R8),
			BPF_ALU64_REG(BPF_MUL, R2, R9),
			BPF_ALU64_IMM(BPF_MUL, R2, 10),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_ALU64_REG(BPF_MOV, R0, R2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x35d97ef2 } }
	},
	{ /* Mainly checking JIT here. */
		"MOV REG64",
		/* Propagate all-ones through every register with 64-bit MOVs,
		 * clear them with 64-bit immediate MOVs, and verify the sum
		 * of all registers is exactly the final immediate 0xfefe.
		 */
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 0),
			BPF_ALU64_IMM(BPF_MOV, R2, 0),
			BPF_ALU64_IMM(BPF_MOV, R3, 0),
			BPF_ALU64_IMM(BPF_MOV, R4, 0),
			BPF_ALU64_IMM(BPF_MOV, R5, 0),
			BPF_ALU64_IMM(BPF_MOV, R6, 0),
			BPF_ALU64_IMM(BPF_MOV, R7, 0),
			BPF_ALU64_IMM(BPF_MOV, R8, 0),
			BPF_ALU64_IMM(BPF_MOV, R9, 0),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
	{ /* Mainly checking JIT here. */
		"MOV REG32",
		/* Same as MOV REG64, but the clears use 32-bit MOVs, which
		 * must zero-extend into the upper 32 bits.
		 */
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_ALU32_IMM(BPF_MOV, R1, 0),
			BPF_ALU32_IMM(BPF_MOV, R2, 0),
			BPF_ALU32_IMM(BPF_MOV, R3, 0),
			BPF_ALU32_IMM(BPF_MOV, R4, 0),
			BPF_ALU32_IMM(BPF_MOV, R5, 0),
			BPF_ALU32_IMM(BPF_MOV, R6, 0),
			BPF_ALU32_IMM(BPF_MOV, R7, 0),
			BPF_ALU32_IMM(BPF_MOV, R8, 0),
			BPF_ALU32_IMM(BPF_MOV, R9, 0),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
	{ /* Mainly checking JIT here. */
		"LD IMM64",
		/* Same pattern again, but the clears use the two-insn
		 * BPF_LD_IMM64 encoding.
		 */
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_LD_IMM64(R0, 0x0LL),
			BPF_LD_IMM64(R1, 0x0LL),
			BPF_LD_IMM64(R2, 0x0LL),
			BPF_LD_IMM64(R3, 0x0LL),
			BPF_LD_IMM64(R4, 0x0LL),
			BPF_LD_IMM64(R5, 0x0LL),
			BPF_LD_IMM64(R6, 0x0LL),
			BPF_LD_IMM64(R7, 0x0LL),
			BPF_LD_IMM64(R8, 0x0LL),
			BPF_LD_IMM64(R9, 0x0LL),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
	{
		"INT: ALU MIX",
		/* Mix DIV/MOD with ADD/XOR; the success path deliberately
		 * falls through to return -1.
		 */
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 11),
			BPF_ALU64_IMM(BPF_ADD, R0, -1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_XOR, R2, 3),	/* R2 = 1 */
			BPF_ALU64_REG(BPF_DIV, R0, R2),
			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOD, R0, 3),
			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -1 } }
	},
	{
		"INT: shifts by register",
		/* Exercise register-operand shifts (RSH/LSH/ARSH, 32- and
		 * 64-bit), including shifting a register by itself; -1 is
		 * the success value.
		 */
		.u.insns_int = {
			BPF_MOV64_IMM(R0, -1234),
			BPF_MOV64_IMM(R1, 1),
			BPF_ALU32_REG(BPF_RSH, R0, R1),
			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
			BPF_EXIT_INSN(),
			BPF_MOV64_IMM(R2, 1),
			BPF_ALU64_REG(BPF_LSH, R0, R2),
			BPF_MOV32_IMM(R4, -1234),
			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_AND, R4, 63),
			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
			BPF_MOV64_IMM(R3, 47),
			BPF_ALU64_REG(BPF_ARSH, R0, R3),
			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
			BPF_EXIT_INSN(),
			BPF_MOV64_IMM(R2, 1),
			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
			BPF_EXIT_INSN(),
			BPF_MOV64_IMM(R4, 4),
			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
			BPF_EXIT_INSN(),
			BPF_MOV64_IMM(R4, 5),
			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
			BPF_EXIT_INSN(),
			BPF_MOV64_IMM(R0, -1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -1 } }
	},
#ifdef CONFIG_32BIT
	{
		"INT: 32-bit context pointer word order and zero-extension",
		/* On 32-bit hosts, verify the context pointer in R1 is
		 * non-zero in its low word and zero-extended in the high
		 * word; returns 1 on success.
		 */
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
			BPF_ALU64_IMM(BPF_RSH, R1, 32),
			BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
#endif
	{
		/* Classic BPF must be rejected when the program can fall off
		 * the end without a return instruction.
		 */
		"check: missing ret",
		.u.insns = {
			BPF_STMT(BPF_LD | BPF_IMM, 1),
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{
		/* Division by constant zero must be rejected at load time. */
		"check: div_k_0",
		.u.insns = {
			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
			BPF_STMT(BPF_RET | BPF_K, 0)
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{
		"check: unknown insn",
		.u.insns = {
			/* seccomp insn, rejected in socket filter */
			BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
			BPF_STMT(BPF_RET | BPF_K, 0)
		},
		CLASSIC | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{
		/* Scratch memory index 16 is one past the valid M[] range. */
		"check: out of range spill/fill",
		.u.insns = {
			BPF_STMT(BPF_STX, 16),
			BPF_STMT(BPF_RET | BPF_K, 0)
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{
		"JUMPS + HOLES",
		/* Long runs of loads separated by forward jumps whose targets
		 * land inside the runs ("holes"); stresses classic-BPF jump
		 * offset translation. Runs on a fixed Ethernet/IP/UDP frame.
		 */
		.u.insns = {
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC,
		{ 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
		  0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
		  0x08, 0x00,
		  0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
		  0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
		  0xc0, 0xa8, 0x33, 0x01,
		  0xc0, 0xa8, 0x33, 0x02,
		  0xbb, 0xb6,
		  0xa9, 0xfa,
		  0x00, 0x14, 0x00, 0x00,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
		  0xcc, 0xcc, 0xcc, 0xcc },
		{ { 88, 0x001b } }
	},
	{
		/* Classic socket filters may not return the X register. */
		"check: RET X",
		.u.insns = {
			BPF_STMT(BPF_RET | BPF_X, 0),
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{
		/* Initializing X first does not make RET X acceptable. */
		"check: LDX + RET X",
		.u.insns = {
			BPF_STMT(BPF_LDX | BPF_IMM, 42),
			BPF_STMT(BPF_RET | BPF_X, 0),
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{ /* Mainly checking JIT here. */
		"M[]: alt STX + LDX",
		/* Spill X to each scratch slot M[0..15], reload it, bump by 1
		 * and move on to the next slot; final A = 100 + 16 = 116.
		 */
		.u.insns = {
			BPF_STMT(BPF_LDX | BPF_IMM, 100),
			BPF_STMT(BPF_STX, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 0),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 1),
			BPF_STMT(BPF_LDX | BPF_MEM, 1),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 2),
			BPF_STMT(BPF_LDX | BPF_MEM, 2),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 3),
			BPF_STMT(BPF_LDX | BPF_MEM, 3),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 4),
			BPF_STMT(BPF_LDX | BPF_MEM, 4),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 5),
			BPF_STMT(BPF_LDX | BPF_MEM, 5),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 6),
			BPF_STMT(BPF_LDX | BPF_MEM, 6),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 7),
			BPF_STMT(BPF_LDX | BPF_MEM, 7),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 8),
			BPF_STMT(BPF_LDX | BPF_MEM, 8),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 9),
			BPF_STMT(BPF_LDX | BPF_MEM, 9),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 10),
			BPF_STMT(BPF_LDX | BPF_MEM, 10),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 11),
			BPF_STMT(BPF_LDX | BPF_MEM, 11),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 12),
			BPF_STMT(BPF_LDX | BPF_MEM, 12),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 13),
			BPF_STMT(BPF_LDX | BPF_MEM, 13),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 14),
			BPF_STMT(BPF_LDX | BPF_MEM, 14),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 15),
			BPF_STMT(BPF_LDX | BPF_MEM, 15),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC | FLAG_NO_DATA,
		{ },
		{ { 0, 116 } },
	},
	{ /* Mainly checking JIT here. */
		"M[]: full STX + full LDX",
		/* Fill all 16 scratch slots with distinct constants, then
		 * read every slot back and accumulate the sum in A.
		 */
		.u.insns = {
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
			BPF_STMT(BPF_STX, 0),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
			BPF_STMT(BPF_STX, 1),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
			BPF_STMT(BPF_STX, 2),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
			BPF_STMT(BPF_STX, 3),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
			BPF_STMT(BPF_STX, 4),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
			BPF_STMT(BPF_STX, 5),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
			BPF_STMT(BPF_STX, 6),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
			BPF_STMT(BPF_STX, 7),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
			BPF_STMT(BPF_STX, 8),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
			BPF_STMT(BPF_STX, 9),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
			BPF_STMT(BPF_STX, 10),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
			BPF_STMT(BPF_STX, 11),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
			BPF_STMT(BPF_STX, 12),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
			BPF_STMT(BPF_STX, 13),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
			BPF_STMT(BPF_STX, 14),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
			BPF_STMT(BPF_STX, 15),
			BPF_STMT(BPF_LDX | BPF_MEM, 0),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 1),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 2),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 3),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 4),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 5),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 6),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 7),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 8),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 9),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 10),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 11),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 12),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 13),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 14),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 15),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC | FLAG_NO_DATA,
		{ },
		{ { 0, 0x2a5a5e5 } },
	},
	{
		/* Loading at or beyond SKF_AD_MAX is rejected at load time. */
		"check: SKF_AD_MAX",
		.u.insns = {
			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
				 SKF_AD_OFF + SKF_AD_MAX),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
		{ },
		{ },
		.fill_helper = NULL,
		.expected_errcode = -EINVAL,
	},
	{ /* Passes checker but fails during runtime. */
		"LD [SKF_AD_OFF-1]",
		.u.insns = {
			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
				 SKF_AD_OFF - 1),
			BPF_STMT(BPF_RET | BPF_K, 1),
		},
		CLASSIC,
		{ },
		{ { 1, 0 } },
	},
	{
		"load 64-bit immediate",
		/* Split a BPF_LD_IMM64 value into its halves via shifts and
		 * verify both; success path leaves R0 = 1.
		 */
		.u.insns_int = {
			BPF_LD_IMM64(R1, 0x567800001234LL),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_ALU64_IMM(BPF_LSH, R3, 32),
			BPF_ALU64_IMM(BPF_RSH, R3, 32),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
			BPF_EXIT_INSN(),
			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
			BPF_EXIT_INSN(),
			BPF_LD_IMM64(R0, 0x1ffffffffLL),
			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	/* BPF_ALU | BPF_MOV | BPF_X */
	{
		"ALU_MOV_X: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		"ALU_MOV_X: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		"ALU64_MOV_X: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		"ALU64_MOV_X: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	/* BPF_ALU | BPF_MOV | BPF_K */
	{
		"ALU_MOV_K: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		"ALU_MOV_K: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 32-bit MOV_K must clear the upper 32 bits of the dst. */
		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
	{
		"ALU_MOV_K: small negative",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123 } }
	},
	{
		/* 32-bit MOV of a negative zero-extends: high word is 0. */
		"ALU_MOV_K: small negative zero extension",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123),
			BPF_ALU64_IMM(BPF_RSH, R0, 32),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } }
	},
	{
		"ALU_MOV_K: large negative",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, -123456789 } }
	},
	{
		"ALU_MOV_K: large negative zero extension",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
			BPF_ALU64_IMM(BPF_RSH, R0, 32),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0 } }
	},
	{
		"ALU64_MOV_K: dst = 2",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		"ALU64_MOV_K: dst = 2147483647",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2147483647 } },
	},
  4430. {
  4431. "ALU64_OR_K: dst = 0x0",
  4432. .u.insns_int = {
  4433. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  4434. BPF_LD_IMM64(R3, 0x0),
  4435. BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
  4436. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4437. BPF_MOV32_IMM(R0, 2),
  4438. BPF_EXIT_INSN(),
  4439. BPF_MOV32_IMM(R0, 1),
  4440. BPF_EXIT_INSN(),
  4441. },
  4442. INTERNAL,
  4443. { },
  4444. { { 0, 0x1 } },
  4445. },
  4446. {
  4447. "ALU64_MOV_K: dst = -1",
  4448. .u.insns_int = {
  4449. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  4450. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  4451. BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
  4452. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4453. BPF_MOV32_IMM(R0, 2),
  4454. BPF_EXIT_INSN(),
  4455. BPF_MOV32_IMM(R0, 1),
  4456. BPF_EXIT_INSN(),
  4457. },
  4458. INTERNAL,
  4459. { },
  4460. { { 0, 0x1 } },
  4461. },
  4462. {
  4463. "ALU64_MOV_K: small negative",
  4464. .u.insns_int = {
  4465. BPF_ALU64_IMM(BPF_MOV, R0, -123),
  4466. BPF_EXIT_INSN(),
  4467. },
  4468. INTERNAL,
  4469. { },
  4470. { { 0, -123 } }
  4471. },
  4472. {
  4473. "ALU64_MOV_K: small negative sign extension",
  4474. .u.insns_int = {
  4475. BPF_ALU64_IMM(BPF_MOV, R0, -123),
  4476. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  4477. BPF_EXIT_INSN(),
  4478. },
  4479. INTERNAL,
  4480. { },
  4481. { { 0, 0xffffffff } }
  4482. },
  4483. {
  4484. "ALU64_MOV_K: large negative",
  4485. .u.insns_int = {
  4486. BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
  4487. BPF_EXIT_INSN(),
  4488. },
  4489. INTERNAL,
  4490. { },
  4491. { { 0, -123456789 } }
  4492. },
  4493. {
  4494. "ALU64_MOV_K: large negative sign extension",
  4495. .u.insns_int = {
  4496. BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
  4497. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  4498. BPF_EXIT_INSN(),
  4499. },
  4500. INTERNAL,
  4501. { },
  4502. { { 0, 0xffffffff } }
  4503. },
  4504. /* BPF_ALU | BPF_ADD | BPF_X */
  4505. {
  4506. "ALU_ADD_X: 1 + 2 = 3",
  4507. .u.insns_int = {
  4508. BPF_LD_IMM64(R0, 1),
  4509. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  4510. BPF_ALU32_REG(BPF_ADD, R0, R1),
  4511. BPF_EXIT_INSN(),
  4512. },
  4513. INTERNAL,
  4514. { },
  4515. { { 0, 3 } },
  4516. },
  4517. {
  4518. "ALU_ADD_X: 1 + 4294967294 = 4294967295",
  4519. .u.insns_int = {
  4520. BPF_LD_IMM64(R0, 1),
  4521. BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
  4522. BPF_ALU32_REG(BPF_ADD, R0, R1),
  4523. BPF_EXIT_INSN(),
  4524. },
  4525. INTERNAL,
  4526. { },
  4527. { { 0, 4294967295U } },
  4528. },
  4529. {
  4530. "ALU_ADD_X: 2 + 4294967294 = 0",
  4531. .u.insns_int = {
  4532. BPF_LD_IMM64(R0, 2),
  4533. BPF_LD_IMM64(R1, 4294967294U),
  4534. BPF_ALU32_REG(BPF_ADD, R0, R1),
  4535. BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
  4536. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  4537. BPF_EXIT_INSN(),
  4538. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  4539. BPF_EXIT_INSN(),
  4540. },
  4541. INTERNAL,
  4542. { },
  4543. { { 0, 1 } },
  4544. },
  4545. {
  4546. "ALU64_ADD_X: 1 + 2 = 3",
  4547. .u.insns_int = {
  4548. BPF_LD_IMM64(R0, 1),
  4549. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  4550. BPF_ALU64_REG(BPF_ADD, R0, R1),
  4551. BPF_EXIT_INSN(),
  4552. },
  4553. INTERNAL,
  4554. { },
  4555. { { 0, 3 } },
  4556. },
  4557. {
  4558. "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
  4559. .u.insns_int = {
  4560. BPF_LD_IMM64(R0, 1),
  4561. BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
  4562. BPF_ALU64_REG(BPF_ADD, R0, R1),
  4563. BPF_EXIT_INSN(),
  4564. },
  4565. INTERNAL,
  4566. { },
  4567. { { 0, 4294967295U } },
  4568. },
  4569. {
  4570. "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
  4571. .u.insns_int = {
  4572. BPF_LD_IMM64(R0, 2),
  4573. BPF_LD_IMM64(R1, 4294967294U),
  4574. BPF_LD_IMM64(R2, 4294967296ULL),
  4575. BPF_ALU64_REG(BPF_ADD, R0, R1),
  4576. BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
  4577. BPF_MOV32_IMM(R0, 0),
  4578. BPF_EXIT_INSN(),
  4579. BPF_MOV32_IMM(R0, 1),
  4580. BPF_EXIT_INSN(),
  4581. },
  4582. INTERNAL,
  4583. { },
  4584. { { 0, 1 } },
  4585. },
  4586. /* BPF_ALU | BPF_ADD | BPF_K */
  4587. {
  4588. "ALU_ADD_K: 1 + 2 = 3",
  4589. .u.insns_int = {
  4590. BPF_LD_IMM64(R0, 1),
  4591. BPF_ALU32_IMM(BPF_ADD, R0, 2),
  4592. BPF_EXIT_INSN(),
  4593. },
  4594. INTERNAL,
  4595. { },
  4596. { { 0, 3 } },
  4597. },
  4598. {
  4599. "ALU_ADD_K: 3 + 0 = 3",
  4600. .u.insns_int = {
  4601. BPF_LD_IMM64(R0, 3),
  4602. BPF_ALU32_IMM(BPF_ADD, R0, 0),
  4603. BPF_EXIT_INSN(),
  4604. },
  4605. INTERNAL,
  4606. { },
  4607. { { 0, 3 } },
  4608. },
  4609. {
  4610. "ALU_ADD_K: 1 + 4294967294 = 4294967295",
  4611. .u.insns_int = {
  4612. BPF_LD_IMM64(R0, 1),
  4613. BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
  4614. BPF_EXIT_INSN(),
  4615. },
  4616. INTERNAL,
  4617. { },
  4618. { { 0, 4294967295U } },
  4619. },
  4620. {
  4621. "ALU_ADD_K: 4294967294 + 2 = 0",
  4622. .u.insns_int = {
  4623. BPF_LD_IMM64(R0, 4294967294U),
  4624. BPF_ALU32_IMM(BPF_ADD, R0, 2),
  4625. BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
  4626. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  4627. BPF_EXIT_INSN(),
  4628. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  4629. BPF_EXIT_INSN(),
  4630. },
  4631. INTERNAL,
  4632. { },
  4633. { { 0, 1 } },
  4634. },
  4635. {
  4636. "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
  4637. .u.insns_int = {
  4638. BPF_LD_IMM64(R2, 0x0),
  4639. BPF_LD_IMM64(R3, 0x00000000ffffffff),
  4640. BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
  4641. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4642. BPF_MOV32_IMM(R0, 2),
  4643. BPF_EXIT_INSN(),
  4644. BPF_MOV32_IMM(R0, 1),
  4645. BPF_EXIT_INSN(),
  4646. },
  4647. INTERNAL,
  4648. { },
  4649. { { 0, 0x1 } },
  4650. },
  4651. {
  4652. "ALU_ADD_K: 0 + 0xffff = 0xffff",
  4653. .u.insns_int = {
  4654. BPF_LD_IMM64(R2, 0x0),
  4655. BPF_LD_IMM64(R3, 0xffff),
  4656. BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
  4657. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4658. BPF_MOV32_IMM(R0, 2),
  4659. BPF_EXIT_INSN(),
  4660. BPF_MOV32_IMM(R0, 1),
  4661. BPF_EXIT_INSN(),
  4662. },
  4663. INTERNAL,
  4664. { },
  4665. { { 0, 0x1 } },
  4666. },
  4667. {
  4668. "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
  4669. .u.insns_int = {
  4670. BPF_LD_IMM64(R2, 0x0),
  4671. BPF_LD_IMM64(R3, 0x7fffffff),
  4672. BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
  4673. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4674. BPF_MOV32_IMM(R0, 2),
  4675. BPF_EXIT_INSN(),
  4676. BPF_MOV32_IMM(R0, 1),
  4677. BPF_EXIT_INSN(),
  4678. },
  4679. INTERNAL,
  4680. { },
  4681. { { 0, 0x1 } },
  4682. },
  4683. {
  4684. "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
  4685. .u.insns_int = {
  4686. BPF_LD_IMM64(R2, 0x0),
  4687. BPF_LD_IMM64(R3, 0x80000000),
  4688. BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
  4689. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4690. BPF_MOV32_IMM(R0, 2),
  4691. BPF_EXIT_INSN(),
  4692. BPF_MOV32_IMM(R0, 1),
  4693. BPF_EXIT_INSN(),
  4694. },
  4695. INTERNAL,
  4696. { },
  4697. { { 0, 0x1 } },
  4698. },
  4699. {
  4700. "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
  4701. .u.insns_int = {
  4702. BPF_LD_IMM64(R2, 0x0),
  4703. BPF_LD_IMM64(R3, 0x80008000),
  4704. BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
  4705. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4706. BPF_MOV32_IMM(R0, 2),
  4707. BPF_EXIT_INSN(),
  4708. BPF_MOV32_IMM(R0, 1),
  4709. BPF_EXIT_INSN(),
  4710. },
  4711. INTERNAL,
  4712. { },
  4713. { { 0, 0x1 } },
  4714. },
  4715. {
  4716. "ALU64_ADD_K: 1 + 2 = 3",
  4717. .u.insns_int = {
  4718. BPF_LD_IMM64(R0, 1),
  4719. BPF_ALU64_IMM(BPF_ADD, R0, 2),
  4720. BPF_EXIT_INSN(),
  4721. },
  4722. INTERNAL,
  4723. { },
  4724. { { 0, 3 } },
  4725. },
  4726. {
  4727. "ALU64_ADD_K: 3 + 0 = 3",
  4728. .u.insns_int = {
  4729. BPF_LD_IMM64(R0, 3),
  4730. BPF_ALU64_IMM(BPF_ADD, R0, 0),
  4731. BPF_EXIT_INSN(),
  4732. },
  4733. INTERNAL,
  4734. { },
  4735. { { 0, 3 } },
  4736. },
  4737. {
  4738. "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
  4739. .u.insns_int = {
  4740. BPF_LD_IMM64(R0, 1),
  4741. BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
  4742. BPF_EXIT_INSN(),
  4743. },
  4744. INTERNAL,
  4745. { },
  4746. { { 0, 2147483647 } },
  4747. },
  4748. {
  4749. "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
  4750. .u.insns_int = {
  4751. BPF_LD_IMM64(R0, 4294967294U),
  4752. BPF_LD_IMM64(R1, 4294967296ULL),
  4753. BPF_ALU64_IMM(BPF_ADD, R0, 2),
  4754. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  4755. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  4756. BPF_EXIT_INSN(),
  4757. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  4758. BPF_EXIT_INSN(),
  4759. },
  4760. INTERNAL,
  4761. { },
  4762. { { 0, 1 } },
  4763. },
  4764. {
  4765. "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
  4766. .u.insns_int = {
  4767. BPF_LD_IMM64(R0, 2147483646),
  4768. BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
  4769. BPF_EXIT_INSN(),
  4770. },
  4771. INTERNAL,
  4772. { },
  4773. { { 0, -1 } },
  4774. },
  4775. {
  4776. "ALU64_ADD_K: 1 + 0 = 1",
  4777. .u.insns_int = {
  4778. BPF_LD_IMM64(R2, 0x1),
  4779. BPF_LD_IMM64(R3, 0x1),
  4780. BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
  4781. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4782. BPF_MOV32_IMM(R0, 2),
  4783. BPF_EXIT_INSN(),
  4784. BPF_MOV32_IMM(R0, 1),
  4785. BPF_EXIT_INSN(),
  4786. },
  4787. INTERNAL,
  4788. { },
  4789. { { 0, 0x1 } },
  4790. },
  4791. {
  4792. "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
  4793. .u.insns_int = {
  4794. BPF_LD_IMM64(R2, 0x0),
  4795. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  4796. BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
  4797. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4798. BPF_MOV32_IMM(R0, 2),
  4799. BPF_EXIT_INSN(),
  4800. BPF_MOV32_IMM(R0, 1),
  4801. BPF_EXIT_INSN(),
  4802. },
  4803. INTERNAL,
  4804. { },
  4805. { { 0, 0x1 } },
  4806. },
  4807. {
  4808. "ALU64_ADD_K: 0 + 0xffff = 0xffff",
  4809. .u.insns_int = {
  4810. BPF_LD_IMM64(R2, 0x0),
  4811. BPF_LD_IMM64(R3, 0xffff),
  4812. BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
  4813. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4814. BPF_MOV32_IMM(R0, 2),
  4815. BPF_EXIT_INSN(),
  4816. BPF_MOV32_IMM(R0, 1),
  4817. BPF_EXIT_INSN(),
  4818. },
  4819. INTERNAL,
  4820. { },
  4821. { { 0, 0x1 } },
  4822. },
  4823. {
  4824. "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
  4825. .u.insns_int = {
  4826. BPF_LD_IMM64(R2, 0x0),
  4827. BPF_LD_IMM64(R3, 0x7fffffff),
  4828. BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
  4829. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4830. BPF_MOV32_IMM(R0, 2),
  4831. BPF_EXIT_INSN(),
  4832. BPF_MOV32_IMM(R0, 1),
  4833. BPF_EXIT_INSN(),
  4834. },
  4835. INTERNAL,
  4836. { },
  4837. { { 0, 0x1 } },
  4838. },
  4839. {
  4840. "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
  4841. .u.insns_int = {
  4842. BPF_LD_IMM64(R2, 0x0),
  4843. BPF_LD_IMM64(R3, 0xffffffff80000000LL),
  4844. BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
  4845. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4846. BPF_MOV32_IMM(R0, 2),
  4847. BPF_EXIT_INSN(),
  4848. BPF_MOV32_IMM(R0, 1),
  4849. BPF_EXIT_INSN(),
  4850. },
  4851. INTERNAL,
  4852. { },
  4853. { { 0, 0x1 } },
  4854. },
  4855. {
  4856. "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
  4857. .u.insns_int = {
  4858. BPF_LD_IMM64(R2, 0x0),
  4859. BPF_LD_IMM64(R3, 0xffffffff80008000LL),
  4860. BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
  4861. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  4862. BPF_MOV32_IMM(R0, 2),
  4863. BPF_EXIT_INSN(),
  4864. BPF_MOV32_IMM(R0, 1),
  4865. BPF_EXIT_INSN(),
  4866. },
  4867. INTERNAL,
  4868. { },
  4869. { { 0, 0x1 } },
  4870. },
  4871. /* BPF_ALU | BPF_SUB | BPF_X */
  4872. {
  4873. "ALU_SUB_X: 3 - 1 = 2",
  4874. .u.insns_int = {
  4875. BPF_LD_IMM64(R0, 3),
  4876. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  4877. BPF_ALU32_REG(BPF_SUB, R0, R1),
  4878. BPF_EXIT_INSN(),
  4879. },
  4880. INTERNAL,
  4881. { },
  4882. { { 0, 2 } },
  4883. },
  4884. {
  4885. "ALU_SUB_X: 4294967295 - 4294967294 = 1",
  4886. .u.insns_int = {
  4887. BPF_LD_IMM64(R0, 4294967295U),
  4888. BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
  4889. BPF_ALU32_REG(BPF_SUB, R0, R1),
  4890. BPF_EXIT_INSN(),
  4891. },
  4892. INTERNAL,
  4893. { },
  4894. { { 0, 1 } },
  4895. },
  4896. {
  4897. "ALU64_SUB_X: 3 - 1 = 2",
  4898. .u.insns_int = {
  4899. BPF_LD_IMM64(R0, 3),
  4900. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  4901. BPF_ALU64_REG(BPF_SUB, R0, R1),
  4902. BPF_EXIT_INSN(),
  4903. },
  4904. INTERNAL,
  4905. { },
  4906. { { 0, 2 } },
  4907. },
  4908. {
  4909. "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
  4910. .u.insns_int = {
  4911. BPF_LD_IMM64(R0, 4294967295U),
  4912. BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
  4913. BPF_ALU64_REG(BPF_SUB, R0, R1),
  4914. BPF_EXIT_INSN(),
  4915. },
  4916. INTERNAL,
  4917. { },
  4918. { { 0, 1 } },
  4919. },
  4920. /* BPF_ALU | BPF_SUB | BPF_K */
  4921. {
  4922. "ALU_SUB_K: 3 - 1 = 2",
  4923. .u.insns_int = {
  4924. BPF_LD_IMM64(R0, 3),
  4925. BPF_ALU32_IMM(BPF_SUB, R0, 1),
  4926. BPF_EXIT_INSN(),
  4927. },
  4928. INTERNAL,
  4929. { },
  4930. { { 0, 2 } },
  4931. },
  4932. {
  4933. "ALU_SUB_K: 3 - 0 = 3",
  4934. .u.insns_int = {
  4935. BPF_LD_IMM64(R0, 3),
  4936. BPF_ALU32_IMM(BPF_SUB, R0, 0),
  4937. BPF_EXIT_INSN(),
  4938. },
  4939. INTERNAL,
  4940. { },
  4941. { { 0, 3 } },
  4942. },
  4943. {
  4944. "ALU_SUB_K: 4294967295 - 4294967294 = 1",
  4945. .u.insns_int = {
  4946. BPF_LD_IMM64(R0, 4294967295U),
  4947. BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
  4948. BPF_EXIT_INSN(),
  4949. },
  4950. INTERNAL,
  4951. { },
  4952. { { 0, 1 } },
  4953. },
  4954. {
  4955. "ALU64_SUB_K: 3 - 1 = 2",
  4956. .u.insns_int = {
  4957. BPF_LD_IMM64(R0, 3),
  4958. BPF_ALU64_IMM(BPF_SUB, R0, 1),
  4959. BPF_EXIT_INSN(),
  4960. },
  4961. INTERNAL,
  4962. { },
  4963. { { 0, 2 } },
  4964. },
  4965. {
  4966. "ALU64_SUB_K: 3 - 0 = 3",
  4967. .u.insns_int = {
  4968. BPF_LD_IMM64(R0, 3),
  4969. BPF_ALU64_IMM(BPF_SUB, R0, 0),
  4970. BPF_EXIT_INSN(),
  4971. },
  4972. INTERNAL,
  4973. { },
  4974. { { 0, 3 } },
  4975. },
  4976. {
  4977. "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
  4978. .u.insns_int = {
  4979. BPF_LD_IMM64(R0, 4294967294U),
  4980. BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
  4981. BPF_EXIT_INSN(),
  4982. },
  4983. INTERNAL,
  4984. { },
  4985. { { 0, -1 } },
  4986. },
  4987. {
  4988. "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
  4989. .u.insns_int = {
  4990. BPF_LD_IMM64(R0, 2147483646),
  4991. BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
  4992. BPF_EXIT_INSN(),
  4993. },
  4994. INTERNAL,
  4995. { },
  4996. { { 0, -1 } },
  4997. },
  4998. /* BPF_ALU | BPF_MUL | BPF_X */
  4999. {
  5000. "ALU_MUL_X: 2 * 3 = 6",
  5001. .u.insns_int = {
  5002. BPF_LD_IMM64(R0, 2),
  5003. BPF_ALU32_IMM(BPF_MOV, R1, 3),
  5004. BPF_ALU32_REG(BPF_MUL, R0, R1),
  5005. BPF_EXIT_INSN(),
  5006. },
  5007. INTERNAL,
  5008. { },
  5009. { { 0, 6 } },
  5010. },
  5011. {
  5012. "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
  5013. .u.insns_int = {
  5014. BPF_LD_IMM64(R0, 2),
  5015. BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
  5016. BPF_ALU32_REG(BPF_MUL, R0, R1),
  5017. BPF_EXIT_INSN(),
  5018. },
  5019. INTERNAL,
  5020. { },
  5021. { { 0, 0xFFFFFFF0 } },
  5022. },
  5023. {
  5024. "ALU_MUL_X: -1 * -1 = 1",
  5025. .u.insns_int = {
  5026. BPF_LD_IMM64(R0, -1),
  5027. BPF_ALU32_IMM(BPF_MOV, R1, -1),
  5028. BPF_ALU32_REG(BPF_MUL, R0, R1),
  5029. BPF_EXIT_INSN(),
  5030. },
  5031. INTERNAL,
  5032. { },
  5033. { { 0, 1 } },
  5034. },
  5035. {
  5036. "ALU64_MUL_X: 2 * 3 = 6",
  5037. .u.insns_int = {
  5038. BPF_LD_IMM64(R0, 2),
  5039. BPF_ALU32_IMM(BPF_MOV, R1, 3),
  5040. BPF_ALU64_REG(BPF_MUL, R0, R1),
  5041. BPF_EXIT_INSN(),
  5042. },
  5043. INTERNAL,
  5044. { },
  5045. { { 0, 6 } },
  5046. },
  5047. {
  5048. "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
  5049. .u.insns_int = {
  5050. BPF_LD_IMM64(R0, 1),
  5051. BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
  5052. BPF_ALU64_REG(BPF_MUL, R0, R1),
  5053. BPF_EXIT_INSN(),
  5054. },
  5055. INTERNAL,
  5056. { },
  5057. { { 0, 2147483647 } },
  5058. },
  5059. {
  5060. "ALU64_MUL_X: 64x64 multiply, low word",
  5061. .u.insns_int = {
  5062. BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
  5063. BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
  5064. BPF_ALU64_REG(BPF_MUL, R0, R1),
  5065. BPF_EXIT_INSN(),
  5066. },
  5067. INTERNAL,
  5068. { },
  5069. { { 0, 0xe5618cf0 } }
  5070. },
  5071. {
  5072. "ALU64_MUL_X: 64x64 multiply, high word",
  5073. .u.insns_int = {
  5074. BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
  5075. BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
  5076. BPF_ALU64_REG(BPF_MUL, R0, R1),
  5077. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  5078. BPF_EXIT_INSN(),
  5079. },
  5080. INTERNAL,
  5081. { },
  5082. { { 0, 0x2236d88f } }
  5083. },
  5084. /* BPF_ALU | BPF_MUL | BPF_K */
  5085. {
  5086. "ALU_MUL_K: 2 * 3 = 6",
  5087. .u.insns_int = {
  5088. BPF_LD_IMM64(R0, 2),
  5089. BPF_ALU32_IMM(BPF_MUL, R0, 3),
  5090. BPF_EXIT_INSN(),
  5091. },
  5092. INTERNAL,
  5093. { },
  5094. { { 0, 6 } },
  5095. },
  5096. {
  5097. "ALU_MUL_K: 3 * 1 = 3",
  5098. .u.insns_int = {
  5099. BPF_LD_IMM64(R0, 3),
  5100. BPF_ALU32_IMM(BPF_MUL, R0, 1),
  5101. BPF_EXIT_INSN(),
  5102. },
  5103. INTERNAL,
  5104. { },
  5105. { { 0, 3 } },
  5106. },
  5107. {
  5108. "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
  5109. .u.insns_int = {
  5110. BPF_LD_IMM64(R0, 2),
  5111. BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
  5112. BPF_EXIT_INSN(),
  5113. },
  5114. INTERNAL,
  5115. { },
  5116. { { 0, 0xFFFFFFF0 } },
  5117. },
  5118. {
  5119. "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
  5120. .u.insns_int = {
  5121. BPF_LD_IMM64(R2, 0x1),
  5122. BPF_LD_IMM64(R3, 0x00000000ffffffff),
  5123. BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
  5124. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5125. BPF_MOV32_IMM(R0, 2),
  5126. BPF_EXIT_INSN(),
  5127. BPF_MOV32_IMM(R0, 1),
  5128. BPF_EXIT_INSN(),
  5129. },
  5130. INTERNAL,
  5131. { },
  5132. { { 0, 0x1 } },
  5133. },
  5134. {
  5135. "ALU64_MUL_K: 2 * 3 = 6",
  5136. .u.insns_int = {
  5137. BPF_LD_IMM64(R0, 2),
  5138. BPF_ALU64_IMM(BPF_MUL, R0, 3),
  5139. BPF_EXIT_INSN(),
  5140. },
  5141. INTERNAL,
  5142. { },
  5143. { { 0, 6 } },
  5144. },
  5145. {
  5146. "ALU64_MUL_K: 3 * 1 = 3",
  5147. .u.insns_int = {
  5148. BPF_LD_IMM64(R0, 3),
  5149. BPF_ALU64_IMM(BPF_MUL, R0, 1),
  5150. BPF_EXIT_INSN(),
  5151. },
  5152. INTERNAL,
  5153. { },
  5154. { { 0, 3 } },
  5155. },
  5156. {
  5157. "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
  5158. .u.insns_int = {
  5159. BPF_LD_IMM64(R0, 1),
  5160. BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
  5161. BPF_EXIT_INSN(),
  5162. },
  5163. INTERNAL,
  5164. { },
  5165. { { 0, 2147483647 } },
  5166. },
  5167. {
  5168. "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
  5169. .u.insns_int = {
  5170. BPF_LD_IMM64(R0, 1),
  5171. BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
  5172. BPF_EXIT_INSN(),
  5173. },
  5174. INTERNAL,
  5175. { },
  5176. { { 0, -2147483647 } },
  5177. },
  5178. {
  5179. "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
  5180. .u.insns_int = {
  5181. BPF_LD_IMM64(R2, 0x1),
  5182. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  5183. BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
  5184. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5185. BPF_MOV32_IMM(R0, 2),
  5186. BPF_EXIT_INSN(),
  5187. BPF_MOV32_IMM(R0, 1),
  5188. BPF_EXIT_INSN(),
  5189. },
  5190. INTERNAL,
  5191. { },
  5192. { { 0, 0x1 } },
  5193. },
  5194. {
  5195. "ALU64_MUL_K: 64x32 multiply, low word",
  5196. .u.insns_int = {
  5197. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5198. BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
  5199. BPF_EXIT_INSN(),
  5200. },
  5201. INTERNAL,
  5202. { },
  5203. { { 0, 0xe242d208 } }
  5204. },
  5205. {
  5206. "ALU64_MUL_K: 64x32 multiply, high word",
  5207. .u.insns_int = {
  5208. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5209. BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
  5210. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  5211. BPF_EXIT_INSN(),
  5212. },
  5213. INTERNAL,
  5214. { },
  5215. { { 0, 0xc28f5c28 } }
  5216. },
  5217. /* BPF_ALU | BPF_DIV | BPF_X */
  5218. {
  5219. "ALU_DIV_X: 6 / 2 = 3",
  5220. .u.insns_int = {
  5221. BPF_LD_IMM64(R0, 6),
  5222. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5223. BPF_ALU32_REG(BPF_DIV, R0, R1),
  5224. BPF_EXIT_INSN(),
  5225. },
  5226. INTERNAL,
  5227. { },
  5228. { { 0, 3 } },
  5229. },
  5230. {
  5231. "ALU_DIV_X: 4294967295 / 4294967295 = 1",
  5232. .u.insns_int = {
  5233. BPF_LD_IMM64(R0, 4294967295U),
  5234. BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
  5235. BPF_ALU32_REG(BPF_DIV, R0, R1),
  5236. BPF_EXIT_INSN(),
  5237. },
  5238. INTERNAL,
  5239. { },
  5240. { { 0, 1 } },
  5241. },
  5242. {
  5243. "ALU64_DIV_X: 6 / 2 = 3",
  5244. .u.insns_int = {
  5245. BPF_LD_IMM64(R0, 6),
  5246. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5247. BPF_ALU64_REG(BPF_DIV, R0, R1),
  5248. BPF_EXIT_INSN(),
  5249. },
  5250. INTERNAL,
  5251. { },
  5252. { { 0, 3 } },
  5253. },
  5254. {
  5255. "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
  5256. .u.insns_int = {
  5257. BPF_LD_IMM64(R0, 2147483647),
  5258. BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
  5259. BPF_ALU64_REG(BPF_DIV, R0, R1),
  5260. BPF_EXIT_INSN(),
  5261. },
  5262. INTERNAL,
  5263. { },
  5264. { { 0, 1 } },
  5265. },
  5266. {
  5267. "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
  5268. .u.insns_int = {
  5269. BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
  5270. BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
  5271. BPF_LD_IMM64(R3, 0x0000000000000001LL),
  5272. BPF_ALU64_REG(BPF_DIV, R2, R4),
  5273. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5274. BPF_MOV32_IMM(R0, 2),
  5275. BPF_EXIT_INSN(),
  5276. BPF_MOV32_IMM(R0, 1),
  5277. BPF_EXIT_INSN(),
  5278. },
  5279. INTERNAL,
  5280. { },
  5281. { { 0, 0x1 } },
  5282. },
  5283. /* BPF_ALU | BPF_DIV | BPF_K */
  5284. {
  5285. "ALU_DIV_K: 6 / 2 = 3",
  5286. .u.insns_int = {
  5287. BPF_LD_IMM64(R0, 6),
  5288. BPF_ALU32_IMM(BPF_DIV, R0, 2),
  5289. BPF_EXIT_INSN(),
  5290. },
  5291. INTERNAL,
  5292. { },
  5293. { { 0, 3 } },
  5294. },
  5295. {
  5296. "ALU_DIV_K: 3 / 1 = 3",
  5297. .u.insns_int = {
  5298. BPF_LD_IMM64(R0, 3),
  5299. BPF_ALU32_IMM(BPF_DIV, R0, 1),
  5300. BPF_EXIT_INSN(),
  5301. },
  5302. INTERNAL,
  5303. { },
  5304. { { 0, 3 } },
  5305. },
  5306. {
  5307. "ALU_DIV_K: 4294967295 / 4294967295 = 1",
  5308. .u.insns_int = {
  5309. BPF_LD_IMM64(R0, 4294967295U),
  5310. BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
  5311. BPF_EXIT_INSN(),
  5312. },
  5313. INTERNAL,
  5314. { },
  5315. { { 0, 1 } },
  5316. },
  5317. {
  5318. "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
  5319. .u.insns_int = {
  5320. BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
  5321. BPF_LD_IMM64(R3, 0x1UL),
  5322. BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
  5323. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5324. BPF_MOV32_IMM(R0, 2),
  5325. BPF_EXIT_INSN(),
  5326. BPF_MOV32_IMM(R0, 1),
  5327. BPF_EXIT_INSN(),
  5328. },
  5329. INTERNAL,
  5330. { },
  5331. { { 0, 0x1 } },
  5332. },
  5333. {
  5334. "ALU64_DIV_K: 6 / 2 = 3",
  5335. .u.insns_int = {
  5336. BPF_LD_IMM64(R0, 6),
  5337. BPF_ALU64_IMM(BPF_DIV, R0, 2),
  5338. BPF_EXIT_INSN(),
  5339. },
  5340. INTERNAL,
  5341. { },
  5342. { { 0, 3 } },
  5343. },
  5344. {
  5345. "ALU64_DIV_K: 3 / 1 = 3",
  5346. .u.insns_int = {
  5347. BPF_LD_IMM64(R0, 3),
  5348. BPF_ALU64_IMM(BPF_DIV, R0, 1),
  5349. BPF_EXIT_INSN(),
  5350. },
  5351. INTERNAL,
  5352. { },
  5353. { { 0, 3 } },
  5354. },
  5355. {
  5356. "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
  5357. .u.insns_int = {
  5358. BPF_LD_IMM64(R0, 2147483647),
  5359. BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
  5360. BPF_EXIT_INSN(),
  5361. },
  5362. INTERNAL,
  5363. { },
  5364. { { 0, 1 } },
  5365. },
  5366. {
  5367. "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
  5368. .u.insns_int = {
  5369. BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
  5370. BPF_LD_IMM64(R3, 0x0000000000000001LL),
  5371. BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
  5372. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5373. BPF_MOV32_IMM(R0, 2),
  5374. BPF_EXIT_INSN(),
  5375. BPF_MOV32_IMM(R0, 1),
  5376. BPF_EXIT_INSN(),
  5377. },
  5378. INTERNAL,
  5379. { },
  5380. { { 0, 0x1 } },
  5381. },
  5382. /* BPF_ALU | BPF_MOD | BPF_X */
  5383. {
  5384. "ALU_MOD_X: 3 % 2 = 1",
  5385. .u.insns_int = {
  5386. BPF_LD_IMM64(R0, 3),
  5387. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5388. BPF_ALU32_REG(BPF_MOD, R0, R1),
  5389. BPF_EXIT_INSN(),
  5390. },
  5391. INTERNAL,
  5392. { },
  5393. { { 0, 1 } },
  5394. },
  5395. {
  5396. "ALU_MOD_X: 4294967295 % 4294967293 = 2",
  5397. .u.insns_int = {
  5398. BPF_LD_IMM64(R0, 4294967295U),
  5399. BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
  5400. BPF_ALU32_REG(BPF_MOD, R0, R1),
  5401. BPF_EXIT_INSN(),
  5402. },
  5403. INTERNAL,
  5404. { },
  5405. { { 0, 2 } },
  5406. },
  5407. {
  5408. "ALU64_MOD_X: 3 % 2 = 1",
  5409. .u.insns_int = {
  5410. BPF_LD_IMM64(R0, 3),
  5411. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5412. BPF_ALU64_REG(BPF_MOD, R0, R1),
  5413. BPF_EXIT_INSN(),
  5414. },
  5415. INTERNAL,
  5416. { },
  5417. { { 0, 1 } },
  5418. },
  5419. {
  5420. "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
  5421. .u.insns_int = {
  5422. BPF_LD_IMM64(R0, 2147483647),
  5423. BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
  5424. BPF_ALU64_REG(BPF_MOD, R0, R1),
  5425. BPF_EXIT_INSN(),
  5426. },
  5427. INTERNAL,
  5428. { },
  5429. { { 0, 2 } },
  5430. },
  5431. /* BPF_ALU | BPF_MOD | BPF_K */
  5432. {
  5433. "ALU_MOD_K: 3 % 2 = 1",
  5434. .u.insns_int = {
  5435. BPF_LD_IMM64(R0, 3),
  5436. BPF_ALU32_IMM(BPF_MOD, R0, 2),
  5437. BPF_EXIT_INSN(),
  5438. },
  5439. INTERNAL,
  5440. { },
  5441. { { 0, 1 } },
  5442. },
  5443. {
  5444. "ALU_MOD_K: 3 % 1 = 0",
  5445. .u.insns_int = {
  5446. BPF_LD_IMM64(R0, 3),
  5447. BPF_ALU32_IMM(BPF_MOD, R0, 1),
  5448. BPF_EXIT_INSN(),
  5449. },
  5450. INTERNAL,
  5451. { },
  5452. { { 0, 0 } },
  5453. },
  5454. {
  5455. "ALU_MOD_K: 4294967295 % 4294967293 = 2",
  5456. .u.insns_int = {
  5457. BPF_LD_IMM64(R0, 4294967295U),
  5458. BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
  5459. BPF_EXIT_INSN(),
  5460. },
  5461. INTERNAL,
  5462. { },
  5463. { { 0, 2 } },
  5464. },
  5465. {
  5466. "ALU64_MOD_K: 3 % 2 = 1",
  5467. .u.insns_int = {
  5468. BPF_LD_IMM64(R0, 3),
  5469. BPF_ALU64_IMM(BPF_MOD, R0, 2),
  5470. BPF_EXIT_INSN(),
  5471. },
  5472. INTERNAL,
  5473. { },
  5474. { { 0, 1 } },
  5475. },
  5476. {
  5477. "ALU64_MOD_K: 3 % 1 = 0",
  5478. .u.insns_int = {
  5479. BPF_LD_IMM64(R0, 3),
  5480. BPF_ALU64_IMM(BPF_MOD, R0, 1),
  5481. BPF_EXIT_INSN(),
  5482. },
  5483. INTERNAL,
  5484. { },
  5485. { { 0, 0 } },
  5486. },
  5487. {
  5488. "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
  5489. .u.insns_int = {
  5490. BPF_LD_IMM64(R0, 2147483647),
  5491. BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
  5492. BPF_EXIT_INSN(),
  5493. },
  5494. INTERNAL,
  5495. { },
  5496. { { 0, 2 } },
  5497. },
  5498. /* BPF_ALU | BPF_AND | BPF_X */
  5499. {
  5500. "ALU_AND_X: 3 & 2 = 2",
  5501. .u.insns_int = {
  5502. BPF_LD_IMM64(R0, 3),
  5503. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5504. BPF_ALU32_REG(BPF_AND, R0, R1),
  5505. BPF_EXIT_INSN(),
  5506. },
  5507. INTERNAL,
  5508. { },
  5509. { { 0, 2 } },
  5510. },
  5511. {
  5512. "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
  5513. .u.insns_int = {
  5514. BPF_LD_IMM64(R0, 0xffffffff),
  5515. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5516. BPF_ALU32_REG(BPF_AND, R0, R1),
  5517. BPF_EXIT_INSN(),
  5518. },
  5519. INTERNAL,
  5520. { },
  5521. { { 0, 0xffffffff } },
  5522. },
  5523. {
  5524. "ALU64_AND_X: 3 & 2 = 2",
  5525. .u.insns_int = {
  5526. BPF_LD_IMM64(R0, 3),
  5527. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5528. BPF_ALU64_REG(BPF_AND, R0, R1),
  5529. BPF_EXIT_INSN(),
  5530. },
  5531. INTERNAL,
  5532. { },
  5533. { { 0, 2 } },
  5534. },
  5535. {
  5536. "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
  5537. .u.insns_int = {
  5538. BPF_LD_IMM64(R0, 0xffffffff),
  5539. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5540. BPF_ALU64_REG(BPF_AND, R0, R1),
  5541. BPF_EXIT_INSN(),
  5542. },
  5543. INTERNAL,
  5544. { },
  5545. { { 0, 0xffffffff } },
  5546. },
  5547. /* BPF_ALU | BPF_AND | BPF_K */
  5548. {
  5549. "ALU_AND_K: 3 & 2 = 2",
  5550. .u.insns_int = {
  5551. BPF_LD_IMM64(R0, 3),
  5552. BPF_ALU32_IMM(BPF_AND, R0, 2),
  5553. BPF_EXIT_INSN(),
  5554. },
  5555. INTERNAL,
  5556. { },
  5557. { { 0, 2 } },
  5558. },
  5559. {
  5560. "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
  5561. .u.insns_int = {
  5562. BPF_LD_IMM64(R0, 0xffffffff),
  5563. BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
  5564. BPF_EXIT_INSN(),
  5565. },
  5566. INTERNAL,
  5567. { },
  5568. { { 0, 0xffffffff } },
  5569. },
  5570. {
  5571. "ALU_AND_K: Small immediate",
  5572. .u.insns_int = {
  5573. BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
  5574. BPF_ALU32_IMM(BPF_AND, R0, 15),
  5575. BPF_EXIT_INSN(),
  5576. },
  5577. INTERNAL,
  5578. { },
  5579. { { 0, 4 } }
  5580. },
  5581. {
  5582. "ALU_AND_K: Large immediate",
  5583. .u.insns_int = {
  5584. BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
  5585. BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
  5586. BPF_EXIT_INSN(),
  5587. },
  5588. INTERNAL,
  5589. { },
  5590. { { 0, 0xa1b2c3d4 } }
  5591. },
  5592. {
  5593. "ALU_AND_K: Zero extension",
  5594. .u.insns_int = {
  5595. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5596. BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
  5597. BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
  5598. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5599. BPF_MOV32_IMM(R0, 2),
  5600. BPF_EXIT_INSN(),
  5601. BPF_MOV32_IMM(R0, 1),
  5602. BPF_EXIT_INSN(),
  5603. },
  5604. INTERNAL,
  5605. { },
  5606. { { 0, 1 } }
  5607. },
  5608. {
  5609. "ALU64_AND_K: 3 & 2 = 2",
  5610. .u.insns_int = {
  5611. BPF_LD_IMM64(R0, 3),
  5612. BPF_ALU64_IMM(BPF_AND, R0, 2),
  5613. BPF_EXIT_INSN(),
  5614. },
  5615. INTERNAL,
  5616. { },
  5617. { { 0, 2 } },
  5618. },
  5619. {
  5620. "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
  5621. .u.insns_int = {
  5622. BPF_LD_IMM64(R0, 0xffffffff),
  5623. BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
  5624. BPF_EXIT_INSN(),
  5625. },
  5626. INTERNAL,
  5627. { },
  5628. { { 0, 0xffffffff } },
  5629. },
  5630. {
  5631. "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
  5632. .u.insns_int = {
  5633. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  5634. BPF_LD_IMM64(R3, 0x0000000000000000LL),
  5635. BPF_ALU64_IMM(BPF_AND, R2, 0x0),
  5636. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5637. BPF_MOV32_IMM(R0, 2),
  5638. BPF_EXIT_INSN(),
  5639. BPF_MOV32_IMM(R0, 1),
  5640. BPF_EXIT_INSN(),
  5641. },
  5642. INTERNAL,
  5643. { },
  5644. { { 0, 0x1 } },
  5645. },
  5646. {
  5647. "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
  5648. .u.insns_int = {
  5649. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  5650. BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
  5651. BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
  5652. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5653. BPF_MOV32_IMM(R0, 2),
  5654. BPF_EXIT_INSN(),
  5655. BPF_MOV32_IMM(R0, 1),
  5656. BPF_EXIT_INSN(),
  5657. },
  5658. INTERNAL,
  5659. { },
  5660. { { 0, 0x1 } },
  5661. },
  5662. {
  5663. "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
  5664. .u.insns_int = {
  5665. BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
  5666. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  5667. BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
  5668. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5669. BPF_MOV32_IMM(R0, 2),
  5670. BPF_EXIT_INSN(),
  5671. BPF_MOV32_IMM(R0, 1),
  5672. BPF_EXIT_INSN(),
  5673. },
  5674. INTERNAL,
  5675. { },
  5676. { { 0, 0x1 } },
  5677. },
  5678. {
  5679. "ALU64_AND_K: Sign extension 1",
  5680. .u.insns_int = {
  5681. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5682. BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
  5683. BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
  5684. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5685. BPF_MOV32_IMM(R0, 2),
  5686. BPF_EXIT_INSN(),
  5687. BPF_MOV32_IMM(R0, 1),
  5688. BPF_EXIT_INSN(),
  5689. },
  5690. INTERNAL,
  5691. { },
  5692. { { 0, 1 } }
  5693. },
  5694. {
  5695. "ALU64_AND_K: Sign extension 2",
  5696. .u.insns_int = {
  5697. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5698. BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
  5699. BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
  5700. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5701. BPF_MOV32_IMM(R0, 2),
  5702. BPF_EXIT_INSN(),
  5703. BPF_MOV32_IMM(R0, 1),
  5704. BPF_EXIT_INSN(),
  5705. },
  5706. INTERNAL,
  5707. { },
  5708. { { 0, 1 } }
  5709. },
  5710. /* BPF_ALU | BPF_OR | BPF_X */
  5711. {
  5712. "ALU_OR_X: 1 | 2 = 3",
  5713. .u.insns_int = {
  5714. BPF_LD_IMM64(R0, 1),
  5715. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5716. BPF_ALU32_REG(BPF_OR, R0, R1),
  5717. BPF_EXIT_INSN(),
  5718. },
  5719. INTERNAL,
  5720. { },
  5721. { { 0, 3 } },
  5722. },
  5723. {
  5724. "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
  5725. .u.insns_int = {
  5726. BPF_LD_IMM64(R0, 0),
  5727. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5728. BPF_ALU32_REG(BPF_OR, R0, R1),
  5729. BPF_EXIT_INSN(),
  5730. },
  5731. INTERNAL,
  5732. { },
  5733. { { 0, 0xffffffff } },
  5734. },
  5735. {
  5736. "ALU64_OR_X: 1 | 2 = 3",
  5737. .u.insns_int = {
  5738. BPF_LD_IMM64(R0, 1),
  5739. BPF_ALU32_IMM(BPF_MOV, R1, 2),
  5740. BPF_ALU64_REG(BPF_OR, R0, R1),
  5741. BPF_EXIT_INSN(),
  5742. },
  5743. INTERNAL,
  5744. { },
  5745. { { 0, 3 } },
  5746. },
  5747. {
  5748. "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
  5749. .u.insns_int = {
  5750. BPF_LD_IMM64(R0, 0),
  5751. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5752. BPF_ALU64_REG(BPF_OR, R0, R1),
  5753. BPF_EXIT_INSN(),
  5754. },
  5755. INTERNAL,
  5756. { },
  5757. { { 0, 0xffffffff } },
  5758. },
  5759. /* BPF_ALU | BPF_OR | BPF_K */
  5760. {
  5761. "ALU_OR_K: 1 | 2 = 3",
  5762. .u.insns_int = {
  5763. BPF_LD_IMM64(R0, 1),
  5764. BPF_ALU32_IMM(BPF_OR, R0, 2),
  5765. BPF_EXIT_INSN(),
  5766. },
  5767. INTERNAL,
  5768. { },
  5769. { { 0, 3 } },
  5770. },
  5771. {
  5772. "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
  5773. .u.insns_int = {
  5774. BPF_LD_IMM64(R0, 0),
  5775. BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
  5776. BPF_EXIT_INSN(),
  5777. },
  5778. INTERNAL,
  5779. { },
  5780. { { 0, 0xffffffff } },
  5781. },
  5782. {
  5783. "ALU_OR_K: Small immediate",
  5784. .u.insns_int = {
  5785. BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
  5786. BPF_ALU32_IMM(BPF_OR, R0, 1),
  5787. BPF_EXIT_INSN(),
  5788. },
  5789. INTERNAL,
  5790. { },
  5791. { { 0, 0x01020305 } }
  5792. },
  5793. {
  5794. "ALU_OR_K: Large immediate",
  5795. .u.insns_int = {
  5796. BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
  5797. BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
  5798. BPF_EXIT_INSN(),
  5799. },
  5800. INTERNAL,
  5801. { },
  5802. { { 0, 0xa1b2c3d4 } }
  5803. },
  5804. {
  5805. "ALU_OR_K: Zero extension",
  5806. .u.insns_int = {
  5807. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5808. BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
  5809. BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
  5810. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5811. BPF_MOV32_IMM(R0, 2),
  5812. BPF_EXIT_INSN(),
  5813. BPF_MOV32_IMM(R0, 1),
  5814. BPF_EXIT_INSN(),
  5815. },
  5816. INTERNAL,
  5817. { },
  5818. { { 0, 1 } }
  5819. },
  5820. {
  5821. "ALU64_OR_K: 1 | 2 = 3",
  5822. .u.insns_int = {
  5823. BPF_LD_IMM64(R0, 1),
  5824. BPF_ALU64_IMM(BPF_OR, R0, 2),
  5825. BPF_EXIT_INSN(),
  5826. },
  5827. INTERNAL,
  5828. { },
  5829. { { 0, 3 } },
  5830. },
  5831. {
  5832. "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
  5833. .u.insns_int = {
  5834. BPF_LD_IMM64(R0, 0),
  5835. BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
  5836. BPF_EXIT_INSN(),
  5837. },
  5838. INTERNAL,
  5839. { },
  5840. { { 0, 0xffffffff } },
  5841. },
  5842. {
  5843. "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
  5844. .u.insns_int = {
  5845. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  5846. BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
  5847. BPF_ALU64_IMM(BPF_OR, R2, 0x0),
  5848. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5849. BPF_MOV32_IMM(R0, 2),
  5850. BPF_EXIT_INSN(),
  5851. BPF_MOV32_IMM(R0, 1),
  5852. BPF_EXIT_INSN(),
  5853. },
  5854. INTERNAL,
  5855. { },
  5856. { { 0, 0x1 } },
  5857. },
  5858. {
  5859. "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
  5860. .u.insns_int = {
  5861. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  5862. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  5863. BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
  5864. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5865. BPF_MOV32_IMM(R0, 2),
  5866. BPF_EXIT_INSN(),
  5867. BPF_MOV32_IMM(R0, 1),
  5868. BPF_EXIT_INSN(),
  5869. },
  5870. INTERNAL,
  5871. { },
  5872. { { 0, 0x1 } },
  5873. },
  5874. {
  5875. "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
  5876. .u.insns_int = {
  5877. BPF_LD_IMM64(R2, 0x0000000000000000LL),
  5878. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  5879. BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
  5880. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  5881. BPF_MOV32_IMM(R0, 2),
  5882. BPF_EXIT_INSN(),
  5883. BPF_MOV32_IMM(R0, 1),
  5884. BPF_EXIT_INSN(),
  5885. },
  5886. INTERNAL,
  5887. { },
  5888. { { 0, 0x1 } },
  5889. },
  5890. {
  5891. "ALU64_OR_K: Sign extension 1",
  5892. .u.insns_int = {
  5893. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5894. BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
  5895. BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
  5896. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5897. BPF_MOV32_IMM(R0, 2),
  5898. BPF_EXIT_INSN(),
  5899. BPF_MOV32_IMM(R0, 1),
  5900. BPF_EXIT_INSN(),
  5901. },
  5902. INTERNAL,
  5903. { },
  5904. { { 0, 1 } }
  5905. },
  5906. {
  5907. "ALU64_OR_K: Sign extension 2",
  5908. .u.insns_int = {
  5909. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  5910. BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
  5911. BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
  5912. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  5913. BPF_MOV32_IMM(R0, 2),
  5914. BPF_EXIT_INSN(),
  5915. BPF_MOV32_IMM(R0, 1),
  5916. BPF_EXIT_INSN(),
  5917. },
  5918. INTERNAL,
  5919. { },
  5920. { { 0, 1 } }
  5921. },
  5922. /* BPF_ALU | BPF_XOR | BPF_X */
  5923. {
  5924. "ALU_XOR_X: 5 ^ 6 = 3",
  5925. .u.insns_int = {
  5926. BPF_LD_IMM64(R0, 5),
  5927. BPF_ALU32_IMM(BPF_MOV, R1, 6),
  5928. BPF_ALU32_REG(BPF_XOR, R0, R1),
  5929. BPF_EXIT_INSN(),
  5930. },
  5931. INTERNAL,
  5932. { },
  5933. { { 0, 3 } },
  5934. },
  5935. {
  5936. "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
  5937. .u.insns_int = {
  5938. BPF_LD_IMM64(R0, 1),
  5939. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5940. BPF_ALU32_REG(BPF_XOR, R0, R1),
  5941. BPF_EXIT_INSN(),
  5942. },
  5943. INTERNAL,
  5944. { },
  5945. { { 0, 0xfffffffe } },
  5946. },
  5947. {
  5948. "ALU64_XOR_X: 5 ^ 6 = 3",
  5949. .u.insns_int = {
  5950. BPF_LD_IMM64(R0, 5),
  5951. BPF_ALU32_IMM(BPF_MOV, R1, 6),
  5952. BPF_ALU64_REG(BPF_XOR, R0, R1),
  5953. BPF_EXIT_INSN(),
  5954. },
  5955. INTERNAL,
  5956. { },
  5957. { { 0, 3 } },
  5958. },
  5959. {
  5960. "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
  5961. .u.insns_int = {
  5962. BPF_LD_IMM64(R0, 1),
  5963. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  5964. BPF_ALU64_REG(BPF_XOR, R0, R1),
  5965. BPF_EXIT_INSN(),
  5966. },
  5967. INTERNAL,
  5968. { },
  5969. { { 0, 0xfffffffe } },
  5970. },
  5971. /* BPF_ALU | BPF_XOR | BPF_K */
  5972. {
  5973. "ALU_XOR_K: 5 ^ 6 = 3",
  5974. .u.insns_int = {
  5975. BPF_LD_IMM64(R0, 5),
  5976. BPF_ALU32_IMM(BPF_XOR, R0, 6),
  5977. BPF_EXIT_INSN(),
  5978. },
  5979. INTERNAL,
  5980. { },
  5981. { { 0, 3 } },
  5982. },
  5983. {
  5984. "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
  5985. .u.insns_int = {
  5986. BPF_LD_IMM64(R0, 1),
  5987. BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
  5988. BPF_EXIT_INSN(),
  5989. },
  5990. INTERNAL,
  5991. { },
  5992. { { 0, 0xfffffffe } },
  5993. },
  5994. {
  5995. "ALU_XOR_K: Small immediate",
  5996. .u.insns_int = {
  5997. BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
  5998. BPF_ALU32_IMM(BPF_XOR, R0, 15),
  5999. BPF_EXIT_INSN(),
  6000. },
  6001. INTERNAL,
  6002. { },
  6003. { { 0, 0x0102030b } }
  6004. },
  6005. {
  6006. "ALU_XOR_K: Large immediate",
  6007. .u.insns_int = {
  6008. BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
  6009. BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
  6010. BPF_EXIT_INSN(),
  6011. },
  6012. INTERNAL,
  6013. { },
  6014. { { 0, 0x5e4d3c2b } }
  6015. },
  6016. {
  6017. "ALU_XOR_K: Zero extension",
  6018. .u.insns_int = {
  6019. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6020. BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
  6021. BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
  6022. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  6023. BPF_MOV32_IMM(R0, 2),
  6024. BPF_EXIT_INSN(),
  6025. BPF_MOV32_IMM(R0, 1),
  6026. BPF_EXIT_INSN(),
  6027. },
  6028. INTERNAL,
  6029. { },
  6030. { { 0, 1 } }
  6031. },
  6032. {
  6033. "ALU64_XOR_K: 5 ^ 6 = 3",
  6034. .u.insns_int = {
  6035. BPF_LD_IMM64(R0, 5),
  6036. BPF_ALU64_IMM(BPF_XOR, R0, 6),
  6037. BPF_EXIT_INSN(),
  6038. },
  6039. INTERNAL,
  6040. { },
  6041. { { 0, 3 } },
  6042. },
  6043. {
  6044. "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
  6045. .u.insns_int = {
  6046. BPF_LD_IMM64(R0, 1),
  6047. BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
  6048. BPF_EXIT_INSN(),
  6049. },
  6050. INTERNAL,
  6051. { },
  6052. { { 0, 0xfffffffe } },
  6053. },
  6054. {
  6055. "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
  6056. .u.insns_int = {
  6057. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  6058. BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
  6059. BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
  6060. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  6061. BPF_MOV32_IMM(R0, 2),
  6062. BPF_EXIT_INSN(),
  6063. BPF_MOV32_IMM(R0, 1),
  6064. BPF_EXIT_INSN(),
  6065. },
  6066. INTERNAL,
  6067. { },
  6068. { { 0, 0x1 } },
  6069. },
  6070. {
  6071. "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
  6072. .u.insns_int = {
  6073. BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
  6074. BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
  6075. BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
  6076. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  6077. BPF_MOV32_IMM(R0, 2),
  6078. BPF_EXIT_INSN(),
  6079. BPF_MOV32_IMM(R0, 1),
  6080. BPF_EXIT_INSN(),
  6081. },
  6082. INTERNAL,
  6083. { },
  6084. { { 0, 0x1 } },
  6085. },
  6086. {
  6087. "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
  6088. .u.insns_int = {
  6089. BPF_LD_IMM64(R2, 0x0000000000000000LL),
  6090. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  6091. BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
  6092. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  6093. BPF_MOV32_IMM(R0, 2),
  6094. BPF_EXIT_INSN(),
  6095. BPF_MOV32_IMM(R0, 1),
  6096. BPF_EXIT_INSN(),
  6097. },
  6098. INTERNAL,
  6099. { },
  6100. { { 0, 0x1 } },
  6101. },
  6102. {
  6103. "ALU64_XOR_K: Sign extension 1",
  6104. .u.insns_int = {
  6105. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6106. BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
  6107. BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
  6108. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  6109. BPF_MOV32_IMM(R0, 2),
  6110. BPF_EXIT_INSN(),
  6111. BPF_MOV32_IMM(R0, 1),
  6112. BPF_EXIT_INSN(),
  6113. },
  6114. INTERNAL,
  6115. { },
  6116. { { 0, 1 } }
  6117. },
  6118. {
  6119. "ALU64_XOR_K: Sign extension 2",
  6120. .u.insns_int = {
  6121. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6122. BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
  6123. BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
  6124. BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
  6125. BPF_MOV32_IMM(R0, 2),
  6126. BPF_EXIT_INSN(),
  6127. BPF_MOV32_IMM(R0, 1),
  6128. BPF_EXIT_INSN(),
  6129. },
  6130. INTERNAL,
  6131. { },
  6132. { { 0, 1 } }
  6133. },
  6134. /* BPF_ALU | BPF_LSH | BPF_X */
  6135. {
  6136. "ALU_LSH_X: 1 << 1 = 2",
  6137. .u.insns_int = {
  6138. BPF_LD_IMM64(R0, 1),
  6139. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  6140. BPF_ALU32_REG(BPF_LSH, R0, R1),
  6141. BPF_EXIT_INSN(),
  6142. },
  6143. INTERNAL,
  6144. { },
  6145. { { 0, 2 } },
  6146. },
  6147. {
  6148. "ALU_LSH_X: 1 << 31 = 0x80000000",
  6149. .u.insns_int = {
  6150. BPF_LD_IMM64(R0, 1),
  6151. BPF_ALU32_IMM(BPF_MOV, R1, 31),
  6152. BPF_ALU32_REG(BPF_LSH, R0, R1),
  6153. BPF_EXIT_INSN(),
  6154. },
  6155. INTERNAL,
  6156. { },
  6157. { { 0, 0x80000000 } },
  6158. },
  6159. {
  6160. "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
  6161. .u.insns_int = {
  6162. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6163. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6164. BPF_ALU32_REG(BPF_LSH, R0, R1),
  6165. BPF_EXIT_INSN(),
  6166. },
  6167. INTERNAL,
  6168. { },
  6169. { { 0, 0x45678000 } }
  6170. },
  6171. {
  6172. "ALU64_LSH_X: 1 << 1 = 2",
  6173. .u.insns_int = {
  6174. BPF_LD_IMM64(R0, 1),
  6175. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  6176. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6177. BPF_EXIT_INSN(),
  6178. },
  6179. INTERNAL,
  6180. { },
  6181. { { 0, 2 } },
  6182. },
  6183. {
  6184. "ALU64_LSH_X: 1 << 31 = 0x80000000",
  6185. .u.insns_int = {
  6186. BPF_LD_IMM64(R0, 1),
  6187. BPF_ALU32_IMM(BPF_MOV, R1, 31),
  6188. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6189. BPF_EXIT_INSN(),
  6190. },
  6191. INTERNAL,
  6192. { },
  6193. { { 0, 0x80000000 } },
  6194. },
  6195. {
  6196. "ALU64_LSH_X: Shift < 32, low word",
  6197. .u.insns_int = {
  6198. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6199. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6200. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6201. BPF_EXIT_INSN(),
  6202. },
  6203. INTERNAL,
  6204. { },
  6205. { { 0, 0xbcdef000 } }
  6206. },
  6207. {
  6208. "ALU64_LSH_X: Shift < 32, high word",
  6209. .u.insns_int = {
  6210. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6211. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6212. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6213. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6214. BPF_EXIT_INSN(),
  6215. },
  6216. INTERNAL,
  6217. { },
  6218. { { 0, 0x3456789a } }
  6219. },
  6220. {
  6221. "ALU64_LSH_X: Shift > 32, low word",
  6222. .u.insns_int = {
  6223. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6224. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6225. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6226. BPF_EXIT_INSN(),
  6227. },
  6228. INTERNAL,
  6229. { },
  6230. { { 0, 0 } }
  6231. },
  6232. {
  6233. "ALU64_LSH_X: Shift > 32, high word",
  6234. .u.insns_int = {
  6235. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6236. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6237. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6238. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6239. BPF_EXIT_INSN(),
  6240. },
  6241. INTERNAL,
  6242. { },
  6243. { { 0, 0x9abcdef0 } }
  6244. },
  6245. {
  6246. "ALU64_LSH_X: Shift == 32, low word",
  6247. .u.insns_int = {
  6248. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6249. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6250. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6251. BPF_EXIT_INSN(),
  6252. },
  6253. INTERNAL,
  6254. { },
  6255. { { 0, 0 } }
  6256. },
  6257. {
  6258. "ALU64_LSH_X: Shift == 32, high word",
  6259. .u.insns_int = {
  6260. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6261. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6262. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6263. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6264. BPF_EXIT_INSN(),
  6265. },
  6266. INTERNAL,
  6267. { },
  6268. { { 0, 0x89abcdef } }
  6269. },
  6270. {
  6271. "ALU64_LSH_X: Zero shift, low word",
  6272. .u.insns_int = {
  6273. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6274. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6275. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6276. BPF_EXIT_INSN(),
  6277. },
  6278. INTERNAL,
  6279. { },
  6280. { { 0, 0x89abcdef } }
  6281. },
  6282. {
  6283. "ALU64_LSH_X: Zero shift, high word",
  6284. .u.insns_int = {
  6285. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6286. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6287. BPF_ALU64_REG(BPF_LSH, R0, R1),
  6288. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6289. BPF_EXIT_INSN(),
  6290. },
  6291. INTERNAL,
  6292. { },
  6293. { { 0, 0x01234567 } }
  6294. },
  6295. /* BPF_ALU | BPF_LSH | BPF_K */
  6296. {
  6297. "ALU_LSH_K: 1 << 1 = 2",
  6298. .u.insns_int = {
  6299. BPF_LD_IMM64(R0, 1),
  6300. BPF_ALU32_IMM(BPF_LSH, R0, 1),
  6301. BPF_EXIT_INSN(),
  6302. },
  6303. INTERNAL,
  6304. { },
  6305. { { 0, 2 } },
  6306. },
  6307. {
  6308. "ALU_LSH_K: 1 << 31 = 0x80000000",
  6309. .u.insns_int = {
  6310. BPF_LD_IMM64(R0, 1),
  6311. BPF_ALU32_IMM(BPF_LSH, R0, 31),
  6312. BPF_EXIT_INSN(),
  6313. },
  6314. INTERNAL,
  6315. { },
  6316. { { 0, 0x80000000 } },
  6317. },
  6318. {
  6319. "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
  6320. .u.insns_int = {
  6321. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6322. BPF_ALU32_IMM(BPF_LSH, R0, 12),
  6323. BPF_EXIT_INSN(),
  6324. },
  6325. INTERNAL,
  6326. { },
  6327. { { 0, 0x45678000 } }
  6328. },
  6329. {
  6330. "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
  6331. .u.insns_int = {
  6332. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6333. BPF_ALU32_IMM(BPF_LSH, R0, 0),
  6334. BPF_EXIT_INSN(),
  6335. },
  6336. INTERNAL,
  6337. { },
  6338. { { 0, 0x12345678 } }
  6339. },
  6340. {
  6341. "ALU64_LSH_K: 1 << 1 = 2",
  6342. .u.insns_int = {
  6343. BPF_LD_IMM64(R0, 1),
  6344. BPF_ALU64_IMM(BPF_LSH, R0, 1),
  6345. BPF_EXIT_INSN(),
  6346. },
  6347. INTERNAL,
  6348. { },
  6349. { { 0, 2 } },
  6350. },
  6351. {
  6352. "ALU64_LSH_K: 1 << 31 = 0x80000000",
  6353. .u.insns_int = {
  6354. BPF_LD_IMM64(R0, 1),
  6355. BPF_ALU64_IMM(BPF_LSH, R0, 31),
  6356. BPF_EXIT_INSN(),
  6357. },
  6358. INTERNAL,
  6359. { },
  6360. { { 0, 0x80000000 } },
  6361. },
  6362. {
  6363. "ALU64_LSH_K: Shift < 32, low word",
  6364. .u.insns_int = {
  6365. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6366. BPF_ALU64_IMM(BPF_LSH, R0, 12),
  6367. BPF_EXIT_INSN(),
  6368. },
  6369. INTERNAL,
  6370. { },
  6371. { { 0, 0xbcdef000 } }
  6372. },
  6373. {
  6374. "ALU64_LSH_K: Shift < 32, high word",
  6375. .u.insns_int = {
  6376. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6377. BPF_ALU64_IMM(BPF_LSH, R0, 12),
  6378. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6379. BPF_EXIT_INSN(),
  6380. },
  6381. INTERNAL,
  6382. { },
  6383. { { 0, 0x3456789a } }
  6384. },
  6385. {
  6386. "ALU64_LSH_K: Shift > 32, low word",
  6387. .u.insns_int = {
  6388. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6389. BPF_ALU64_IMM(BPF_LSH, R0, 36),
  6390. BPF_EXIT_INSN(),
  6391. },
  6392. INTERNAL,
  6393. { },
  6394. { { 0, 0 } }
  6395. },
  6396. {
  6397. "ALU64_LSH_K: Shift > 32, high word",
  6398. .u.insns_int = {
  6399. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6400. BPF_ALU64_IMM(BPF_LSH, R0, 36),
  6401. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6402. BPF_EXIT_INSN(),
  6403. },
  6404. INTERNAL,
  6405. { },
  6406. { { 0, 0x9abcdef0 } }
  6407. },
  6408. {
  6409. "ALU64_LSH_K: Shift == 32, low word",
  6410. .u.insns_int = {
  6411. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6412. BPF_ALU64_IMM(BPF_LSH, R0, 32),
  6413. BPF_EXIT_INSN(),
  6414. },
  6415. INTERNAL,
  6416. { },
  6417. { { 0, 0 } }
  6418. },
  6419. {
  6420. "ALU64_LSH_K: Shift == 32, high word",
  6421. .u.insns_int = {
  6422. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6423. BPF_ALU64_IMM(BPF_LSH, R0, 32),
  6424. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6425. BPF_EXIT_INSN(),
  6426. },
  6427. INTERNAL,
  6428. { },
  6429. { { 0, 0x89abcdef } }
  6430. },
  6431. {
  6432. "ALU64_LSH_K: Zero shift",
  6433. .u.insns_int = {
  6434. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6435. BPF_ALU64_IMM(BPF_LSH, R0, 0),
  6436. BPF_EXIT_INSN(),
  6437. },
  6438. INTERNAL,
  6439. { },
  6440. { { 0, 0x89abcdef } }
  6441. },
  6442. /* BPF_ALU | BPF_RSH | BPF_X */
  6443. {
  6444. "ALU_RSH_X: 2 >> 1 = 1",
  6445. .u.insns_int = {
  6446. BPF_LD_IMM64(R0, 2),
  6447. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  6448. BPF_ALU32_REG(BPF_RSH, R0, R1),
  6449. BPF_EXIT_INSN(),
  6450. },
  6451. INTERNAL,
  6452. { },
  6453. { { 0, 1 } },
  6454. },
  6455. {
  6456. "ALU_RSH_X: 0x80000000 >> 31 = 1",
  6457. .u.insns_int = {
  6458. BPF_LD_IMM64(R0, 0x80000000),
  6459. BPF_ALU32_IMM(BPF_MOV, R1, 31),
  6460. BPF_ALU32_REG(BPF_RSH, R0, R1),
  6461. BPF_EXIT_INSN(),
  6462. },
  6463. INTERNAL,
  6464. { },
  6465. { { 0, 1 } },
  6466. },
  6467. {
  6468. "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
  6469. .u.insns_int = {
  6470. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6471. BPF_ALU32_IMM(BPF_MOV, R1, 20),
  6472. BPF_ALU32_REG(BPF_RSH, R0, R1),
  6473. BPF_EXIT_INSN(),
  6474. },
  6475. INTERNAL,
  6476. { },
  6477. { { 0, 0x123 } }
  6478. },
  6479. {
  6480. "ALU64_RSH_X: 2 >> 1 = 1",
  6481. .u.insns_int = {
  6482. BPF_LD_IMM64(R0, 2),
  6483. BPF_ALU32_IMM(BPF_MOV, R1, 1),
  6484. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6485. BPF_EXIT_INSN(),
  6486. },
  6487. INTERNAL,
  6488. { },
  6489. { { 0, 1 } },
  6490. },
  6491. {
  6492. "ALU64_RSH_X: 0x80000000 >> 31 = 1",
  6493. .u.insns_int = {
  6494. BPF_LD_IMM64(R0, 0x80000000),
  6495. BPF_ALU32_IMM(BPF_MOV, R1, 31),
  6496. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6497. BPF_EXIT_INSN(),
  6498. },
  6499. INTERNAL,
  6500. { },
  6501. { { 0, 1 } },
  6502. },
  6503. {
  6504. "ALU64_RSH_X: Shift < 32, low word",
  6505. .u.insns_int = {
  6506. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6507. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6508. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6509. BPF_EXIT_INSN(),
  6510. },
  6511. INTERNAL,
  6512. { },
  6513. { { 0, 0x56789abc } }
  6514. },
  6515. {
  6516. "ALU64_RSH_X: Shift < 32, high word",
  6517. .u.insns_int = {
  6518. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6519. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6520. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6521. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6522. BPF_EXIT_INSN(),
  6523. },
  6524. INTERNAL,
  6525. { },
  6526. { { 0, 0x00081234 } }
  6527. },
  6528. {
  6529. "ALU64_RSH_X: Shift > 32, low word",
  6530. .u.insns_int = {
  6531. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6532. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6533. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6534. BPF_EXIT_INSN(),
  6535. },
  6536. INTERNAL,
  6537. { },
  6538. { { 0, 0x08123456 } }
  6539. },
  6540. {
  6541. "ALU64_RSH_X: Shift > 32, high word",
  6542. .u.insns_int = {
  6543. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6544. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6545. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6546. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6547. BPF_EXIT_INSN(),
  6548. },
  6549. INTERNAL,
  6550. { },
  6551. { { 0, 0 } }
  6552. },
  6553. {
  6554. "ALU64_RSH_X: Shift == 32, low word",
  6555. .u.insns_int = {
  6556. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6557. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6558. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6559. BPF_EXIT_INSN(),
  6560. },
  6561. INTERNAL,
  6562. { },
  6563. { { 0, 0x81234567 } }
  6564. },
  6565. {
  6566. "ALU64_RSH_X: Shift == 32, high word",
  6567. .u.insns_int = {
  6568. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6569. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6570. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6571. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6572. BPF_EXIT_INSN(),
  6573. },
  6574. INTERNAL,
  6575. { },
  6576. { { 0, 0 } }
  6577. },
  6578. {
  6579. "ALU64_RSH_X: Zero shift, low word",
  6580. .u.insns_int = {
  6581. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6582. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6583. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6584. BPF_EXIT_INSN(),
  6585. },
  6586. INTERNAL,
  6587. { },
  6588. { { 0, 0x89abcdef } }
  6589. },
  6590. {
  6591. "ALU64_RSH_X: Zero shift, high word",
  6592. .u.insns_int = {
  6593. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6594. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6595. BPF_ALU64_REG(BPF_RSH, R0, R1),
  6596. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6597. BPF_EXIT_INSN(),
  6598. },
  6599. INTERNAL,
  6600. { },
  6601. { { 0, 0x81234567 } }
  6602. },
  6603. /* BPF_ALU | BPF_RSH | BPF_K */
  6604. {
  6605. "ALU_RSH_K: 2 >> 1 = 1",
  6606. .u.insns_int = {
  6607. BPF_LD_IMM64(R0, 2),
  6608. BPF_ALU32_IMM(BPF_RSH, R0, 1),
  6609. BPF_EXIT_INSN(),
  6610. },
  6611. INTERNAL,
  6612. { },
  6613. { { 0, 1 } },
  6614. },
  6615. {
  6616. "ALU_RSH_K: 0x80000000 >> 31 = 1",
  6617. .u.insns_int = {
  6618. BPF_LD_IMM64(R0, 0x80000000),
  6619. BPF_ALU32_IMM(BPF_RSH, R0, 31),
  6620. BPF_EXIT_INSN(),
  6621. },
  6622. INTERNAL,
  6623. { },
  6624. { { 0, 1 } },
  6625. },
  6626. {
  6627. "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
  6628. .u.insns_int = {
  6629. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6630. BPF_ALU32_IMM(BPF_RSH, R0, 20),
  6631. BPF_EXIT_INSN(),
  6632. },
  6633. INTERNAL,
  6634. { },
  6635. { { 0, 0x123 } }
  6636. },
  6637. {
  6638. "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
  6639. .u.insns_int = {
  6640. BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
  6641. BPF_ALU32_IMM(BPF_RSH, R0, 0),
  6642. BPF_EXIT_INSN(),
  6643. },
  6644. INTERNAL,
  6645. { },
  6646. { { 0, 0x12345678 } }
  6647. },
  6648. {
  6649. "ALU64_RSH_K: 2 >> 1 = 1",
  6650. .u.insns_int = {
  6651. BPF_LD_IMM64(R0, 2),
  6652. BPF_ALU64_IMM(BPF_RSH, R0, 1),
  6653. BPF_EXIT_INSN(),
  6654. },
  6655. INTERNAL,
  6656. { },
  6657. { { 0, 1 } },
  6658. },
  6659. {
  6660. "ALU64_RSH_K: 0x80000000 >> 31 = 1",
  6661. .u.insns_int = {
  6662. BPF_LD_IMM64(R0, 0x80000000),
  6663. BPF_ALU64_IMM(BPF_RSH, R0, 31),
  6664. BPF_EXIT_INSN(),
  6665. },
  6666. INTERNAL,
  6667. { },
  6668. { { 0, 1 } },
  6669. },
  6670. {
  6671. "ALU64_RSH_K: Shift < 32, low word",
  6672. .u.insns_int = {
  6673. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6674. BPF_ALU64_IMM(BPF_RSH, R0, 12),
  6675. BPF_EXIT_INSN(),
  6676. },
  6677. INTERNAL,
  6678. { },
  6679. { { 0, 0x56789abc } }
  6680. },
  6681. {
  6682. "ALU64_RSH_K: Shift < 32, high word",
  6683. .u.insns_int = {
  6684. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6685. BPF_ALU64_IMM(BPF_RSH, R0, 12),
  6686. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6687. BPF_EXIT_INSN(),
  6688. },
  6689. INTERNAL,
  6690. { },
  6691. { { 0, 0x00081234 } }
  6692. },
  6693. {
  6694. "ALU64_RSH_K: Shift > 32, low word",
  6695. .u.insns_int = {
  6696. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6697. BPF_ALU64_IMM(BPF_RSH, R0, 36),
  6698. BPF_EXIT_INSN(),
  6699. },
  6700. INTERNAL,
  6701. { },
  6702. { { 0, 0x08123456 } }
  6703. },
  6704. {
  6705. "ALU64_RSH_K: Shift > 32, high word",
  6706. .u.insns_int = {
  6707. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6708. BPF_ALU64_IMM(BPF_RSH, R0, 36),
  6709. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6710. BPF_EXIT_INSN(),
  6711. },
  6712. INTERNAL,
  6713. { },
  6714. { { 0, 0 } }
  6715. },
  6716. {
  6717. "ALU64_RSH_K: Shift == 32, low word",
  6718. .u.insns_int = {
  6719. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6720. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6721. BPF_EXIT_INSN(),
  6722. },
  6723. INTERNAL,
  6724. { },
  6725. { { 0, 0x81234567 } }
  6726. },
  6727. {
  6728. "ALU64_RSH_K: Shift == 32, high word",
  6729. .u.insns_int = {
  6730. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6731. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6732. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6733. BPF_EXIT_INSN(),
  6734. },
  6735. INTERNAL,
  6736. { },
  6737. { { 0, 0 } }
  6738. },
  6739. {
  6740. "ALU64_RSH_K: Zero shift",
  6741. .u.insns_int = {
  6742. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  6743. BPF_ALU64_IMM(BPF_RSH, R0, 0),
  6744. BPF_EXIT_INSN(),
  6745. },
  6746. INTERNAL,
  6747. { },
  6748. { { 0, 0x89abcdef } }
  6749. },
  6750. /* BPF_ALU | BPF_ARSH | BPF_X */
  6751. {
  6752. "ALU32_ARSH_X: -1234 >> 7 = -10",
  6753. .u.insns_int = {
  6754. BPF_ALU32_IMM(BPF_MOV, R0, -1234),
  6755. BPF_ALU32_IMM(BPF_MOV, R1, 7),
  6756. BPF_ALU32_REG(BPF_ARSH, R0, R1),
  6757. BPF_EXIT_INSN(),
  6758. },
  6759. INTERNAL,
  6760. { },
  6761. { { 0, -10 } }
  6762. },
  6763. {
  6764. "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
  6765. .u.insns_int = {
  6766. BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
  6767. BPF_ALU32_IMM(BPF_MOV, R1, 40),
  6768. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6769. BPF_EXIT_INSN(),
  6770. },
  6771. INTERNAL,
  6772. { },
  6773. { { 0, 0xffff00ff } },
  6774. },
  6775. {
  6776. "ALU64_ARSH_X: Shift < 32, low word",
  6777. .u.insns_int = {
  6778. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6779. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6780. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6781. BPF_EXIT_INSN(),
  6782. },
  6783. INTERNAL,
  6784. { },
  6785. { { 0, 0x56789abc } }
  6786. },
  6787. {
  6788. "ALU64_ARSH_X: Shift < 32, high word",
  6789. .u.insns_int = {
  6790. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6791. BPF_ALU32_IMM(BPF_MOV, R1, 12),
  6792. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6793. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6794. BPF_EXIT_INSN(),
  6795. },
  6796. INTERNAL,
  6797. { },
  6798. { { 0, 0xfff81234 } }
  6799. },
  6800. {
  6801. "ALU64_ARSH_X: Shift > 32, low word",
  6802. .u.insns_int = {
  6803. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6804. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6805. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6806. BPF_EXIT_INSN(),
  6807. },
  6808. INTERNAL,
  6809. { },
  6810. { { 0, 0xf8123456 } }
  6811. },
  6812. {
  6813. "ALU64_ARSH_X: Shift > 32, high word",
  6814. .u.insns_int = {
  6815. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6816. BPF_ALU32_IMM(BPF_MOV, R1, 36),
  6817. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6818. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6819. BPF_EXIT_INSN(),
  6820. },
  6821. INTERNAL,
  6822. { },
  6823. { { 0, -1 } }
  6824. },
  6825. {
  6826. "ALU64_ARSH_X: Shift == 32, low word",
  6827. .u.insns_int = {
  6828. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6829. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6830. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6831. BPF_EXIT_INSN(),
  6832. },
  6833. INTERNAL,
  6834. { },
  6835. { { 0, 0x81234567 } }
  6836. },
  6837. {
  6838. "ALU64_ARSH_X: Shift == 32, high word",
  6839. .u.insns_int = {
  6840. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6841. BPF_ALU32_IMM(BPF_MOV, R1, 32),
  6842. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6843. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6844. BPF_EXIT_INSN(),
  6845. },
  6846. INTERNAL,
  6847. { },
  6848. { { 0, -1 } }
  6849. },
  6850. {
  6851. "ALU64_ARSH_X: Zero shift, low word",
  6852. .u.insns_int = {
  6853. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6854. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6855. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6856. BPF_EXIT_INSN(),
  6857. },
  6858. INTERNAL,
  6859. { },
  6860. { { 0, 0x89abcdef } }
  6861. },
  6862. {
  6863. "ALU64_ARSH_X: Zero shift, high word",
  6864. .u.insns_int = {
  6865. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6866. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  6867. BPF_ALU64_REG(BPF_ARSH, R0, R1),
  6868. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6869. BPF_EXIT_INSN(),
  6870. },
  6871. INTERNAL,
  6872. { },
  6873. { { 0, 0x81234567 } }
  6874. },
  6875. /* BPF_ALU | BPF_ARSH | BPF_K */
  6876. {
  6877. "ALU32_ARSH_K: -1234 >> 7 = -10",
  6878. .u.insns_int = {
  6879. BPF_ALU32_IMM(BPF_MOV, R0, -1234),
  6880. BPF_ALU32_IMM(BPF_ARSH, R0, 7),
  6881. BPF_EXIT_INSN(),
  6882. },
  6883. INTERNAL,
  6884. { },
  6885. { { 0, -10 } }
  6886. },
  6887. {
  6888. "ALU32_ARSH_K: -1234 >> 0 = -1234",
  6889. .u.insns_int = {
  6890. BPF_ALU32_IMM(BPF_MOV, R0, -1234),
  6891. BPF_ALU32_IMM(BPF_ARSH, R0, 0),
  6892. BPF_EXIT_INSN(),
  6893. },
  6894. INTERNAL,
  6895. { },
  6896. { { 0, -1234 } }
  6897. },
  6898. {
  6899. "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
  6900. .u.insns_int = {
  6901. BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
  6902. BPF_ALU64_IMM(BPF_ARSH, R0, 40),
  6903. BPF_EXIT_INSN(),
  6904. },
  6905. INTERNAL,
  6906. { },
  6907. { { 0, 0xffff00ff } },
  6908. },
  6909. {
  6910. "ALU64_ARSH_K: Shift < 32, low word",
  6911. .u.insns_int = {
  6912. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6913. BPF_ALU64_IMM(BPF_RSH, R0, 12),
  6914. BPF_EXIT_INSN(),
  6915. },
  6916. INTERNAL,
  6917. { },
  6918. { { 0, 0x56789abc } }
  6919. },
  6920. {
  6921. "ALU64_ARSH_K: Shift < 32, high word",
  6922. .u.insns_int = {
  6923. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6924. BPF_ALU64_IMM(BPF_ARSH, R0, 12),
  6925. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6926. BPF_EXIT_INSN(),
  6927. },
  6928. INTERNAL,
  6929. { },
  6930. { { 0, 0xfff81234 } }
  6931. },
  6932. {
  6933. "ALU64_ARSH_K: Shift > 32, low word",
  6934. .u.insns_int = {
  6935. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6936. BPF_ALU64_IMM(BPF_ARSH, R0, 36),
  6937. BPF_EXIT_INSN(),
  6938. },
  6939. INTERNAL,
  6940. { },
  6941. { { 0, 0xf8123456 } }
  6942. },
  6943. {
  6944. "ALU64_ARSH_K: Shift > 32, high word",
  6945. .u.insns_int = {
  6946. BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
  6947. BPF_ALU64_IMM(BPF_ARSH, R0, 36),
  6948. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6949. BPF_EXIT_INSN(),
  6950. },
  6951. INTERNAL,
  6952. { },
  6953. { { 0, -1 } }
  6954. },
  6955. {
  6956. "ALU64_ARSH_K: Shift == 32, low word",
  6957. .u.insns_int = {
  6958. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6959. BPF_ALU64_IMM(BPF_ARSH, R0, 32),
  6960. BPF_EXIT_INSN(),
  6961. },
  6962. INTERNAL,
  6963. { },
  6964. { { 0, 0x81234567 } }
  6965. },
  6966. {
  6967. "ALU64_ARSH_K: Shift == 32, high word",
  6968. .u.insns_int = {
  6969. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6970. BPF_ALU64_IMM(BPF_ARSH, R0, 32),
  6971. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  6972. BPF_EXIT_INSN(),
  6973. },
  6974. INTERNAL,
  6975. { },
  6976. { { 0, -1 } }
  6977. },
  6978. {
  6979. "ALU64_ARSH_K: Zero shift",
  6980. .u.insns_int = {
  6981. BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
  6982. BPF_ALU64_IMM(BPF_ARSH, R0, 0),
  6983. BPF_EXIT_INSN(),
  6984. },
  6985. INTERNAL,
  6986. { },
  6987. { { 0, 0x89abcdef } }
  6988. },
  6989. /* BPF_ALU | BPF_NEG */
  6990. {
  6991. "ALU_NEG: -(3) = -3",
  6992. .u.insns_int = {
  6993. BPF_ALU32_IMM(BPF_MOV, R0, 3),
  6994. BPF_ALU32_IMM(BPF_NEG, R0, 0),
  6995. BPF_EXIT_INSN(),
  6996. },
  6997. INTERNAL,
  6998. { },
  6999. { { 0, -3 } },
  7000. },
  7001. {
  7002. "ALU_NEG: -(-3) = 3",
  7003. .u.insns_int = {
  7004. BPF_ALU32_IMM(BPF_MOV, R0, -3),
  7005. BPF_ALU32_IMM(BPF_NEG, R0, 0),
  7006. BPF_EXIT_INSN(),
  7007. },
  7008. INTERNAL,
  7009. { },
  7010. { { 0, 3 } },
  7011. },
  7012. {
  7013. "ALU64_NEG: -(3) = -3",
  7014. .u.insns_int = {
  7015. BPF_LD_IMM64(R0, 3),
  7016. BPF_ALU64_IMM(BPF_NEG, R0, 0),
  7017. BPF_EXIT_INSN(),
  7018. },
  7019. INTERNAL,
  7020. { },
  7021. { { 0, -3 } },
  7022. },
  7023. {
  7024. "ALU64_NEG: -(-3) = 3",
  7025. .u.insns_int = {
  7026. BPF_LD_IMM64(R0, -3),
  7027. BPF_ALU64_IMM(BPF_NEG, R0, 0),
  7028. BPF_EXIT_INSN(),
  7029. },
  7030. INTERNAL,
  7031. { },
  7032. { { 0, 3 } },
  7033. },
  7034. /* BPF_ALU | BPF_END | BPF_FROM_BE */
  7035. {
  7036. "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
  7037. .u.insns_int = {
  7038. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7039. BPF_ENDIAN(BPF_FROM_BE, R0, 16),
  7040. BPF_EXIT_INSN(),
  7041. },
  7042. INTERNAL,
  7043. { },
  7044. { { 0, cpu_to_be16(0xcdef) } },
  7045. },
  7046. {
  7047. "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
  7048. .u.insns_int = {
  7049. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7050. BPF_ENDIAN(BPF_FROM_BE, R0, 32),
  7051. BPF_ALU64_REG(BPF_MOV, R1, R0),
  7052. BPF_ALU64_IMM(BPF_RSH, R1, 32),
  7053. BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
  7054. BPF_EXIT_INSN(),
  7055. },
  7056. INTERNAL,
  7057. { },
  7058. { { 0, cpu_to_be32(0x89abcdef) } },
  7059. },
  7060. {
  7061. "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
  7062. .u.insns_int = {
  7063. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7064. BPF_ENDIAN(BPF_FROM_BE, R0, 64),
  7065. BPF_EXIT_INSN(),
  7066. },
  7067. INTERNAL,
  7068. { },
  7069. { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
  7070. },
  7071. {
  7072. "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
  7073. .u.insns_int = {
  7074. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7075. BPF_ENDIAN(BPF_FROM_BE, R0, 64),
  7076. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  7077. BPF_EXIT_INSN(),
  7078. },
  7079. INTERNAL,
  7080. { },
  7081. { { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
  7082. },
  7083. /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
  7084. {
  7085. "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
  7086. .u.insns_int = {
  7087. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7088. BPF_ENDIAN(BPF_FROM_BE, R0, 16),
  7089. BPF_EXIT_INSN(),
  7090. },
  7091. INTERNAL,
  7092. { },
  7093. { { 0, cpu_to_be16(0x3210) } },
  7094. },
  7095. {
  7096. "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
  7097. .u.insns_int = {
  7098. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7099. BPF_ENDIAN(BPF_FROM_BE, R0, 32),
  7100. BPF_ALU64_REG(BPF_MOV, R1, R0),
  7101. BPF_ALU64_IMM(BPF_RSH, R1, 32),
  7102. BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
  7103. BPF_EXIT_INSN(),
  7104. },
  7105. INTERNAL,
  7106. { },
  7107. { { 0, cpu_to_be32(0x76543210) } },
  7108. },
  7109. {
  7110. "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
  7111. .u.insns_int = {
  7112. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7113. BPF_ENDIAN(BPF_FROM_BE, R0, 64),
  7114. BPF_EXIT_INSN(),
  7115. },
  7116. INTERNAL,
  7117. { },
  7118. { { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
  7119. },
  7120. {
  7121. "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
  7122. .u.insns_int = {
  7123. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7124. BPF_ENDIAN(BPF_FROM_BE, R0, 64),
  7125. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  7126. BPF_EXIT_INSN(),
  7127. },
  7128. INTERNAL,
  7129. { },
  7130. { { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
  7131. },
  7132. /* BPF_ALU | BPF_END | BPF_FROM_LE */
  7133. {
  7134. "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
  7135. .u.insns_int = {
  7136. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7137. BPF_ENDIAN(BPF_FROM_LE, R0, 16),
  7138. BPF_EXIT_INSN(),
  7139. },
  7140. INTERNAL,
  7141. { },
  7142. { { 0, cpu_to_le16(0xcdef) } },
  7143. },
  7144. {
  7145. "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
  7146. .u.insns_int = {
  7147. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7148. BPF_ENDIAN(BPF_FROM_LE, R0, 32),
  7149. BPF_ALU64_REG(BPF_MOV, R1, R0),
  7150. BPF_ALU64_IMM(BPF_RSH, R1, 32),
  7151. BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
  7152. BPF_EXIT_INSN(),
  7153. },
  7154. INTERNAL,
  7155. { },
  7156. { { 0, cpu_to_le32(0x89abcdef) } },
  7157. },
  7158. {
  7159. "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
  7160. .u.insns_int = {
  7161. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7162. BPF_ENDIAN(BPF_FROM_LE, R0, 64),
  7163. BPF_EXIT_INSN(),
  7164. },
  7165. INTERNAL,
  7166. { },
  7167. { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
  7168. },
  7169. {
  7170. "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
  7171. .u.insns_int = {
  7172. BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
  7173. BPF_ENDIAN(BPF_FROM_LE, R0, 64),
  7174. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  7175. BPF_EXIT_INSN(),
  7176. },
  7177. INTERNAL,
  7178. { },
  7179. { { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
  7180. },
  7181. /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
  7182. {
  7183. "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
  7184. .u.insns_int = {
  7185. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7186. BPF_ENDIAN(BPF_FROM_LE, R0, 16),
  7187. BPF_EXIT_INSN(),
  7188. },
  7189. INTERNAL,
  7190. { },
  7191. { { 0, cpu_to_le16(0x3210) } },
  7192. },
  7193. {
  7194. "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
  7195. .u.insns_int = {
  7196. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7197. BPF_ENDIAN(BPF_FROM_LE, R0, 32),
  7198. BPF_ALU64_REG(BPF_MOV, R1, R0),
  7199. BPF_ALU64_IMM(BPF_RSH, R1, 32),
  7200. BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
  7201. BPF_EXIT_INSN(),
  7202. },
  7203. INTERNAL,
  7204. { },
  7205. { { 0, cpu_to_le32(0x76543210) } },
  7206. },
  7207. {
  7208. "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
  7209. .u.insns_int = {
  7210. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7211. BPF_ENDIAN(BPF_FROM_LE, R0, 64),
  7212. BPF_EXIT_INSN(),
  7213. },
  7214. INTERNAL,
  7215. { },
  7216. { { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
  7217. },
  7218. {
  7219. "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
  7220. .u.insns_int = {
  7221. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  7222. BPF_ENDIAN(BPF_FROM_LE, R0, 64),
  7223. BPF_ALU64_IMM(BPF_RSH, R0, 32),
  7224. BPF_EXIT_INSN(),
  7225. },
  7226. INTERNAL,
  7227. { },
  7228. { { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
  7229. },
  7230. /* BPF_LDX_MEM B/H/W/DW */
  7231. {
  7232. "BPF_LDX_MEM | BPF_B, base",
  7233. .u.insns_int = {
  7234. BPF_LD_IMM64(R1, 0x0102030405060708ULL),
  7235. BPF_LD_IMM64(R2, 0x0000000000000008ULL),
  7236. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7237. #ifdef __BIG_ENDIAN
  7238. BPF_LDX_MEM(BPF_B, R0, R10, -1),
  7239. #else
  7240. BPF_LDX_MEM(BPF_B, R0, R10, -8),
  7241. #endif
  7242. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7243. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7244. BPF_EXIT_INSN(),
  7245. },
  7246. INTERNAL,
  7247. { },
  7248. { { 0, 0 } },
  7249. .stack_depth = 8,
  7250. },
  7251. {
  7252. "BPF_LDX_MEM | BPF_B, MSB set",
  7253. .u.insns_int = {
  7254. BPF_LD_IMM64(R1, 0x8182838485868788ULL),
  7255. BPF_LD_IMM64(R2, 0x0000000000000088ULL),
  7256. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7257. #ifdef __BIG_ENDIAN
  7258. BPF_LDX_MEM(BPF_B, R0, R10, -1),
  7259. #else
  7260. BPF_LDX_MEM(BPF_B, R0, R10, -8),
  7261. #endif
  7262. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7263. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7264. BPF_EXIT_INSN(),
  7265. },
  7266. INTERNAL,
  7267. { },
  7268. { { 0, 0 } },
  7269. .stack_depth = 8,
  7270. },
  7271. {
  7272. "BPF_LDX_MEM | BPF_B, negative offset",
  7273. .u.insns_int = {
  7274. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7275. BPF_LD_IMM64(R3, 0x0000000000000088ULL),
  7276. BPF_ALU64_IMM(BPF_ADD, R1, 512),
  7277. BPF_STX_MEM(BPF_B, R1, R2, -256),
  7278. BPF_LDX_MEM(BPF_B, R0, R1, -256),
  7279. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7280. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7281. BPF_EXIT_INSN(),
  7282. },
  7283. INTERNAL | FLAG_LARGE_MEM,
  7284. { },
  7285. { { 512, 0 } },
  7286. .stack_depth = 0,
  7287. },
  7288. {
  7289. "BPF_LDX_MEM | BPF_B, small positive offset",
  7290. .u.insns_int = {
  7291. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7292. BPF_LD_IMM64(R3, 0x0000000000000088ULL),
  7293. BPF_STX_MEM(BPF_B, R1, R2, 256),
  7294. BPF_LDX_MEM(BPF_B, R0, R1, 256),
  7295. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7296. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7297. BPF_EXIT_INSN(),
  7298. },
  7299. INTERNAL | FLAG_LARGE_MEM,
  7300. { },
  7301. { { 512, 0 } },
  7302. .stack_depth = 0,
  7303. },
  7304. {
  7305. "BPF_LDX_MEM | BPF_B, large positive offset",
  7306. .u.insns_int = {
  7307. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7308. BPF_LD_IMM64(R3, 0x0000000000000088ULL),
  7309. BPF_STX_MEM(BPF_B, R1, R2, 4096),
  7310. BPF_LDX_MEM(BPF_B, R0, R1, 4096),
  7311. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7312. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7313. BPF_EXIT_INSN(),
  7314. },
  7315. INTERNAL | FLAG_LARGE_MEM,
  7316. { },
  7317. { { 4096 + 16, 0 } },
  7318. .stack_depth = 0,
  7319. },
  7320. {
  7321. "BPF_LDX_MEM | BPF_H, base",
  7322. .u.insns_int = {
  7323. BPF_LD_IMM64(R1, 0x0102030405060708ULL),
  7324. BPF_LD_IMM64(R2, 0x0000000000000708ULL),
  7325. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7326. #ifdef __BIG_ENDIAN
  7327. BPF_LDX_MEM(BPF_H, R0, R10, -2),
  7328. #else
  7329. BPF_LDX_MEM(BPF_H, R0, R10, -8),
  7330. #endif
  7331. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7332. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7333. BPF_EXIT_INSN(),
  7334. },
  7335. INTERNAL,
  7336. { },
  7337. { { 0, 0 } },
  7338. .stack_depth = 8,
  7339. },
  7340. {
  7341. "BPF_LDX_MEM | BPF_H, MSB set",
  7342. .u.insns_int = {
  7343. BPF_LD_IMM64(R1, 0x8182838485868788ULL),
  7344. BPF_LD_IMM64(R2, 0x0000000000008788ULL),
  7345. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7346. #ifdef __BIG_ENDIAN
  7347. BPF_LDX_MEM(BPF_H, R0, R10, -2),
  7348. #else
  7349. BPF_LDX_MEM(BPF_H, R0, R10, -8),
  7350. #endif
  7351. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7352. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7353. BPF_EXIT_INSN(),
  7354. },
  7355. INTERNAL,
  7356. { },
  7357. { { 0, 0 } },
  7358. .stack_depth = 8,
  7359. },
  7360. {
  7361. "BPF_LDX_MEM | BPF_H, negative offset",
  7362. .u.insns_int = {
  7363. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7364. BPF_LD_IMM64(R3, 0x0000000000008788ULL),
  7365. BPF_ALU64_IMM(BPF_ADD, R1, 512),
  7366. BPF_STX_MEM(BPF_H, R1, R2, -256),
  7367. BPF_LDX_MEM(BPF_H, R0, R1, -256),
  7368. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7369. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7370. BPF_EXIT_INSN(),
  7371. },
  7372. INTERNAL | FLAG_LARGE_MEM,
  7373. { },
  7374. { { 512, 0 } },
  7375. .stack_depth = 0,
  7376. },
  7377. {
  7378. "BPF_LDX_MEM | BPF_H, small positive offset",
  7379. .u.insns_int = {
  7380. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7381. BPF_LD_IMM64(R3, 0x0000000000008788ULL),
  7382. BPF_STX_MEM(BPF_H, R1, R2, 256),
  7383. BPF_LDX_MEM(BPF_H, R0, R1, 256),
  7384. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7385. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7386. BPF_EXIT_INSN(),
  7387. },
  7388. INTERNAL | FLAG_LARGE_MEM,
  7389. { },
  7390. { { 512, 0 } },
  7391. .stack_depth = 0,
  7392. },
  7393. {
  7394. "BPF_LDX_MEM | BPF_H, large positive offset",
  7395. .u.insns_int = {
  7396. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7397. BPF_LD_IMM64(R3, 0x0000000000008788ULL),
  7398. BPF_STX_MEM(BPF_H, R1, R2, 8192),
  7399. BPF_LDX_MEM(BPF_H, R0, R1, 8192),
  7400. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7401. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7402. BPF_EXIT_INSN(),
  7403. },
  7404. INTERNAL | FLAG_LARGE_MEM,
  7405. { },
  7406. { { 8192 + 16, 0 } },
  7407. .stack_depth = 0,
  7408. },
  7409. {
  7410. "BPF_LDX_MEM | BPF_H, unaligned positive offset",
  7411. .u.insns_int = {
  7412. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7413. BPF_LD_IMM64(R3, 0x0000000000008788ULL),
  7414. BPF_STX_MEM(BPF_H, R1, R2, 13),
  7415. BPF_LDX_MEM(BPF_H, R0, R1, 13),
  7416. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7417. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7418. BPF_EXIT_INSN(),
  7419. },
  7420. INTERNAL | FLAG_LARGE_MEM,
  7421. { },
  7422. { { 32, 0 } },
  7423. .stack_depth = 0,
  7424. },
  7425. {
  7426. "BPF_LDX_MEM | BPF_W, base",
  7427. .u.insns_int = {
  7428. BPF_LD_IMM64(R1, 0x0102030405060708ULL),
  7429. BPF_LD_IMM64(R2, 0x0000000005060708ULL),
  7430. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7431. #ifdef __BIG_ENDIAN
  7432. BPF_LDX_MEM(BPF_W, R0, R10, -4),
  7433. #else
  7434. BPF_LDX_MEM(BPF_W, R0, R10, -8),
  7435. #endif
  7436. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7437. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7438. BPF_EXIT_INSN(),
  7439. },
  7440. INTERNAL,
  7441. { },
  7442. { { 0, 0 } },
  7443. .stack_depth = 8,
  7444. },
  7445. {
  7446. "BPF_LDX_MEM | BPF_W, MSB set",
  7447. .u.insns_int = {
  7448. BPF_LD_IMM64(R1, 0x8182838485868788ULL),
  7449. BPF_LD_IMM64(R2, 0x0000000085868788ULL),
  7450. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7451. #ifdef __BIG_ENDIAN
  7452. BPF_LDX_MEM(BPF_W, R0, R10, -4),
  7453. #else
  7454. BPF_LDX_MEM(BPF_W, R0, R10, -8),
  7455. #endif
  7456. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7457. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7458. BPF_EXIT_INSN(),
  7459. },
  7460. INTERNAL,
  7461. { },
  7462. { { 0, 0 } },
  7463. .stack_depth = 8,
  7464. },
  7465. {
  7466. "BPF_LDX_MEM | BPF_W, negative offset",
  7467. .u.insns_int = {
  7468. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7469. BPF_LD_IMM64(R3, 0x0000000085868788ULL),
  7470. BPF_ALU64_IMM(BPF_ADD, R1, 512),
  7471. BPF_STX_MEM(BPF_W, R1, R2, -256),
  7472. BPF_LDX_MEM(BPF_W, R0, R1, -256),
  7473. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7474. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7475. BPF_EXIT_INSN(),
  7476. },
  7477. INTERNAL | FLAG_LARGE_MEM,
  7478. { },
  7479. { { 512, 0 } },
  7480. .stack_depth = 0,
  7481. },
  7482. {
  7483. "BPF_LDX_MEM | BPF_W, small positive offset",
  7484. .u.insns_int = {
  7485. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7486. BPF_LD_IMM64(R3, 0x0000000085868788ULL),
  7487. BPF_STX_MEM(BPF_W, R1, R2, 256),
  7488. BPF_LDX_MEM(BPF_W, R0, R1, 256),
  7489. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7490. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7491. BPF_EXIT_INSN(),
  7492. },
  7493. INTERNAL | FLAG_LARGE_MEM,
  7494. { },
  7495. { { 512, 0 } },
  7496. .stack_depth = 0,
  7497. },
  7498. {
  7499. "BPF_LDX_MEM | BPF_W, large positive offset",
  7500. .u.insns_int = {
  7501. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7502. BPF_LD_IMM64(R3, 0x0000000085868788ULL),
  7503. BPF_STX_MEM(BPF_W, R1, R2, 16384),
  7504. BPF_LDX_MEM(BPF_W, R0, R1, 16384),
  7505. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7506. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7507. BPF_EXIT_INSN(),
  7508. },
  7509. INTERNAL | FLAG_LARGE_MEM,
  7510. { },
  7511. { { 16384 + 16, 0 } },
  7512. .stack_depth = 0,
  7513. },
  7514. {
  7515. "BPF_LDX_MEM | BPF_W, unaligned positive offset",
  7516. .u.insns_int = {
  7517. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7518. BPF_LD_IMM64(R3, 0x0000000085868788ULL),
  7519. BPF_STX_MEM(BPF_W, R1, R2, 13),
  7520. BPF_LDX_MEM(BPF_W, R0, R1, 13),
  7521. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7522. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7523. BPF_EXIT_INSN(),
  7524. },
  7525. INTERNAL | FLAG_LARGE_MEM,
  7526. { },
  7527. { { 32, 0 } },
  7528. .stack_depth = 0,
  7529. },
  7530. {
  7531. "BPF_LDX_MEM | BPF_DW, base",
  7532. .u.insns_int = {
  7533. BPF_LD_IMM64(R1, 0x0102030405060708ULL),
  7534. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7535. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7536. BPF_JMP_REG(BPF_JNE, R0, R1, 1),
  7537. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7538. BPF_EXIT_INSN(),
  7539. },
  7540. INTERNAL,
  7541. { },
  7542. { { 0, 0 } },
  7543. .stack_depth = 8,
  7544. },
  7545. {
  7546. "BPF_LDX_MEM | BPF_DW, MSB set",
  7547. .u.insns_int = {
  7548. BPF_LD_IMM64(R1, 0x8182838485868788ULL),
  7549. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7550. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7551. BPF_JMP_REG(BPF_JNE, R0, R1, 1),
  7552. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7553. BPF_EXIT_INSN(),
  7554. },
  7555. INTERNAL,
  7556. { },
  7557. { { 0, 0 } },
  7558. .stack_depth = 8,
  7559. },
  7560. {
  7561. "BPF_LDX_MEM | BPF_DW, negative offset",
  7562. .u.insns_int = {
  7563. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7564. BPF_ALU64_IMM(BPF_ADD, R1, 512),
  7565. BPF_STX_MEM(BPF_DW, R1, R2, -256),
  7566. BPF_LDX_MEM(BPF_DW, R0, R1, -256),
  7567. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7568. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7569. BPF_EXIT_INSN(),
  7570. },
  7571. INTERNAL | FLAG_LARGE_MEM,
  7572. { },
  7573. { { 512, 0 } },
  7574. .stack_depth = 0,
  7575. },
  7576. {
  7577. "BPF_LDX_MEM | BPF_DW, small positive offset",
  7578. .u.insns_int = {
  7579. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7580. BPF_STX_MEM(BPF_DW, R1, R2, 256),
  7581. BPF_LDX_MEM(BPF_DW, R0, R1, 256),
  7582. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7583. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7584. BPF_EXIT_INSN(),
  7585. },
  7586. INTERNAL | FLAG_LARGE_MEM,
  7587. { },
  7588. { { 512, 0 } },
  7589. .stack_depth = 8,
  7590. },
  7591. {
  7592. "BPF_LDX_MEM | BPF_DW, large positive offset",
  7593. .u.insns_int = {
  7594. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7595. BPF_STX_MEM(BPF_DW, R1, R2, 32760),
  7596. BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
  7597. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7598. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7599. BPF_EXIT_INSN(),
  7600. },
  7601. INTERNAL | FLAG_LARGE_MEM,
  7602. { },
  7603. { { 32768, 0 } },
  7604. .stack_depth = 0,
  7605. },
  7606. {
  7607. "BPF_LDX_MEM | BPF_DW, unaligned positive offset",
  7608. .u.insns_int = {
  7609. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7610. BPF_STX_MEM(BPF_DW, R1, R2, 13),
  7611. BPF_LDX_MEM(BPF_DW, R0, R1, 13),
  7612. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  7613. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7614. BPF_EXIT_INSN(),
  7615. },
  7616. INTERNAL | FLAG_LARGE_MEM,
  7617. { },
  7618. { { 32, 0 } },
  7619. .stack_depth = 0,
  7620. },
  7621. /* BPF_STX_MEM B/H/W/DW */
  7622. {
  7623. "BPF_STX_MEM | BPF_B",
  7624. .u.insns_int = {
  7625. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7626. BPF_LD_IMM64(R2, 0x0102030405060708ULL),
  7627. BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
  7628. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7629. #ifdef __BIG_ENDIAN
  7630. BPF_STX_MEM(BPF_B, R10, R2, -1),
  7631. #else
  7632. BPF_STX_MEM(BPF_B, R10, R2, -8),
  7633. #endif
  7634. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7635. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7636. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7637. BPF_EXIT_INSN(),
  7638. },
  7639. INTERNAL,
  7640. { },
  7641. { { 0, 0 } },
  7642. .stack_depth = 8,
  7643. },
  7644. {
  7645. "BPF_STX_MEM | BPF_B, MSB set",
  7646. .u.insns_int = {
  7647. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7648. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7649. BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
  7650. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7651. #ifdef __BIG_ENDIAN
  7652. BPF_STX_MEM(BPF_B, R10, R2, -1),
  7653. #else
  7654. BPF_STX_MEM(BPF_B, R10, R2, -8),
  7655. #endif
  7656. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7657. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7658. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7659. BPF_EXIT_INSN(),
  7660. },
  7661. INTERNAL,
  7662. { },
  7663. { { 0, 0 } },
  7664. .stack_depth = 8,
  7665. },
  7666. {
  7667. "BPF_STX_MEM | BPF_H",
  7668. .u.insns_int = {
  7669. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7670. BPF_LD_IMM64(R2, 0x0102030405060708ULL),
  7671. BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
  7672. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7673. #ifdef __BIG_ENDIAN
  7674. BPF_STX_MEM(BPF_H, R10, R2, -2),
  7675. #else
  7676. BPF_STX_MEM(BPF_H, R10, R2, -8),
  7677. #endif
  7678. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7679. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7680. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7681. BPF_EXIT_INSN(),
  7682. },
  7683. INTERNAL,
  7684. { },
  7685. { { 0, 0 } },
  7686. .stack_depth = 8,
  7687. },
  7688. {
  7689. "BPF_STX_MEM | BPF_H, MSB set",
  7690. .u.insns_int = {
  7691. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7692. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7693. BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
  7694. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7695. #ifdef __BIG_ENDIAN
  7696. BPF_STX_MEM(BPF_H, R10, R2, -2),
  7697. #else
  7698. BPF_STX_MEM(BPF_H, R10, R2, -8),
  7699. #endif
  7700. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7701. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7702. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7703. BPF_EXIT_INSN(),
  7704. },
  7705. INTERNAL,
  7706. { },
  7707. { { 0, 0 } },
  7708. .stack_depth = 8,
  7709. },
  7710. {
  7711. "BPF_STX_MEM | BPF_W",
  7712. .u.insns_int = {
  7713. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7714. BPF_LD_IMM64(R2, 0x0102030405060708ULL),
  7715. BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
  7716. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7717. #ifdef __BIG_ENDIAN
  7718. BPF_STX_MEM(BPF_W, R10, R2, -4),
  7719. #else
  7720. BPF_STX_MEM(BPF_W, R10, R2, -8),
  7721. #endif
  7722. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7723. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7724. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7725. BPF_EXIT_INSN(),
  7726. },
  7727. INTERNAL,
  7728. { },
  7729. { { 0, 0 } },
  7730. .stack_depth = 8,
  7731. },
  7732. {
  7733. "BPF_STX_MEM | BPF_W, MSB set",
  7734. .u.insns_int = {
  7735. BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
  7736. BPF_LD_IMM64(R2, 0x8182838485868788ULL),
  7737. BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
  7738. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  7739. #ifdef __BIG_ENDIAN
  7740. BPF_STX_MEM(BPF_W, R10, R2, -4),
  7741. #else
  7742. BPF_STX_MEM(BPF_W, R10, R2, -8),
  7743. #endif
  7744. BPF_LDX_MEM(BPF_DW, R0, R10, -8),
  7745. BPF_JMP_REG(BPF_JNE, R0, R3, 1),
  7746. BPF_ALU64_IMM(BPF_MOV, R0, 0),
  7747. BPF_EXIT_INSN(),
  7748. },
  7749. INTERNAL,
  7750. { },
  7751. { { 0, 0 } },
  7752. .stack_depth = 8,
  7753. },
  7754. /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
  7755. {
  7756. "ST_MEM_B: Store/Load byte: max negative",
  7757. .u.insns_int = {
  7758. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7759. BPF_ST_MEM(BPF_B, R10, -40, 0xff),
  7760. BPF_LDX_MEM(BPF_B, R0, R10, -40),
  7761. BPF_EXIT_INSN(),
  7762. },
  7763. INTERNAL,
  7764. { },
  7765. { { 0, 0xff } },
  7766. .stack_depth = 40,
  7767. },
  7768. {
  7769. "ST_MEM_B: Store/Load byte: max positive",
  7770. .u.insns_int = {
  7771. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7772. BPF_ST_MEM(BPF_H, R10, -40, 0x7f),
  7773. BPF_LDX_MEM(BPF_H, R0, R10, -40),
  7774. BPF_EXIT_INSN(),
  7775. },
  7776. INTERNAL,
  7777. { },
  7778. { { 0, 0x7f } },
  7779. .stack_depth = 40,
  7780. },
  7781. {
  7782. "STX_MEM_B: Store/Load byte: max negative",
  7783. .u.insns_int = {
  7784. BPF_LD_IMM64(R0, 0),
  7785. BPF_LD_IMM64(R1, 0xffLL),
  7786. BPF_STX_MEM(BPF_B, R10, R1, -40),
  7787. BPF_LDX_MEM(BPF_B, R0, R10, -40),
  7788. BPF_EXIT_INSN(),
  7789. },
  7790. INTERNAL,
  7791. { },
  7792. { { 0, 0xff } },
  7793. .stack_depth = 40,
  7794. },
  7795. {
  7796. "ST_MEM_H: Store/Load half word: max negative",
  7797. .u.insns_int = {
  7798. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7799. BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
  7800. BPF_LDX_MEM(BPF_H, R0, R10, -40),
  7801. BPF_EXIT_INSN(),
  7802. },
  7803. INTERNAL,
  7804. { },
  7805. { { 0, 0xffff } },
  7806. .stack_depth = 40,
  7807. },
  7808. {
  7809. "ST_MEM_H: Store/Load half word: max positive",
  7810. .u.insns_int = {
  7811. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7812. BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
  7813. BPF_LDX_MEM(BPF_H, R0, R10, -40),
  7814. BPF_EXIT_INSN(),
  7815. },
  7816. INTERNAL,
  7817. { },
  7818. { { 0, 0x7fff } },
  7819. .stack_depth = 40,
  7820. },
  7821. {
  7822. "STX_MEM_H: Store/Load half word: max negative",
  7823. .u.insns_int = {
  7824. BPF_LD_IMM64(R0, 0),
  7825. BPF_LD_IMM64(R1, 0xffffLL),
  7826. BPF_STX_MEM(BPF_H, R10, R1, -40),
  7827. BPF_LDX_MEM(BPF_H, R0, R10, -40),
  7828. BPF_EXIT_INSN(),
  7829. },
  7830. INTERNAL,
  7831. { },
  7832. { { 0, 0xffff } },
  7833. .stack_depth = 40,
  7834. },
  7835. {
  7836. "ST_MEM_W: Store/Load word: max negative",
  7837. .u.insns_int = {
  7838. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7839. BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
  7840. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  7841. BPF_EXIT_INSN(),
  7842. },
  7843. INTERNAL,
  7844. { },
  7845. { { 0, 0xffffffff } },
  7846. .stack_depth = 40,
  7847. },
  7848. {
  7849. "ST_MEM_W: Store/Load word: max positive",
  7850. .u.insns_int = {
  7851. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7852. BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
  7853. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  7854. BPF_EXIT_INSN(),
  7855. },
  7856. INTERNAL,
  7857. { },
  7858. { { 0, 0x7fffffff } },
  7859. .stack_depth = 40,
  7860. },
  7861. {
  7862. "STX_MEM_W: Store/Load word: max negative",
  7863. .u.insns_int = {
  7864. BPF_LD_IMM64(R0, 0),
  7865. BPF_LD_IMM64(R1, 0xffffffffLL),
  7866. BPF_STX_MEM(BPF_W, R10, R1, -40),
  7867. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  7868. BPF_EXIT_INSN(),
  7869. },
  7870. INTERNAL,
  7871. { },
  7872. { { 0, 0xffffffff } },
  7873. .stack_depth = 40,
  7874. },
  7875. {
  7876. "ST_MEM_DW: Store/Load double word: max negative",
  7877. .u.insns_int = {
  7878. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7879. BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
  7880. BPF_LDX_MEM(BPF_DW, R0, R10, -40),
  7881. BPF_EXIT_INSN(),
  7882. },
  7883. INTERNAL,
  7884. { },
  7885. { { 0, 0xffffffff } },
  7886. .stack_depth = 40,
  7887. },
  7888. {
  7889. "ST_MEM_DW: Store/Load double word: max negative 2",
  7890. .u.insns_int = {
  7891. BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
  7892. BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
  7893. BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
  7894. BPF_LDX_MEM(BPF_DW, R2, R10, -40),
  7895. BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
  7896. BPF_MOV32_IMM(R0, 2),
  7897. BPF_EXIT_INSN(),
  7898. BPF_MOV32_IMM(R0, 1),
  7899. BPF_EXIT_INSN(),
  7900. },
  7901. INTERNAL,
  7902. { },
  7903. { { 0, 0x1 } },
  7904. .stack_depth = 40,
  7905. },
  7906. {
  7907. "ST_MEM_DW: Store/Load double word: max positive",
  7908. .u.insns_int = {
  7909. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  7910. BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
  7911. BPF_LDX_MEM(BPF_DW, R0, R10, -40),
  7912. BPF_EXIT_INSN(),
  7913. },
  7914. INTERNAL,
  7915. { },
  7916. { { 0, 0x7fffffff } },
  7917. .stack_depth = 40,
  7918. },
  7919. {
  7920. "STX_MEM_DW: Store/Load double word: max negative",
  7921. .u.insns_int = {
  7922. BPF_LD_IMM64(R0, 0),
  7923. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  7924. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  7925. BPF_LDX_MEM(BPF_DW, R0, R10, -40),
  7926. BPF_EXIT_INSN(),
  7927. },
  7928. INTERNAL,
  7929. { },
  7930. { { 0, 0xffffffff } },
  7931. .stack_depth = 40,
  7932. },
  7933. {
  7934. "STX_MEM_DW: Store double word: first word in memory",
  7935. .u.insns_int = {
  7936. BPF_LD_IMM64(R0, 0),
  7937. BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
  7938. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  7939. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  7940. BPF_EXIT_INSN(),
  7941. },
  7942. INTERNAL,
  7943. { },
  7944. #ifdef __BIG_ENDIAN
  7945. { { 0, 0x01234567 } },
  7946. #else
  7947. { { 0, 0x89abcdef } },
  7948. #endif
  7949. .stack_depth = 40,
  7950. },
  7951. {
  7952. "STX_MEM_DW: Store double word: second word in memory",
  7953. .u.insns_int = {
  7954. BPF_LD_IMM64(R0, 0),
  7955. BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
  7956. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  7957. BPF_LDX_MEM(BPF_W, R0, R10, -36),
  7958. BPF_EXIT_INSN(),
  7959. },
  7960. INTERNAL,
  7961. { },
  7962. #ifdef __BIG_ENDIAN
  7963. { { 0, 0x89abcdef } },
  7964. #else
  7965. { { 0, 0x01234567 } },
  7966. #endif
  7967. .stack_depth = 40,
  7968. },
  7969. /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
  7970. {
  7971. "STX_XADD_W: X + 1 + 1 + 1 + ...",
  7972. { },
  7973. INTERNAL,
  7974. { },
  7975. { { 0, 4134 } },
  7976. .fill_helper = bpf_fill_stxw,
  7977. },
  7978. {
  7979. "STX_XADD_DW: X + 1 + 1 + 1 + ...",
  7980. { },
  7981. INTERNAL,
  7982. { },
  7983. { { 0, 4134 } },
  7984. .fill_helper = bpf_fill_stxdw,
  7985. },
  7986. /*
  7987. * Exhaustive tests of atomic operation variants.
  7988. * Individual tests are expanded from template macros for all
  7989. * combinations of ALU operation, word size and fetching.
  7990. */
/* For BPF_W operations, pre-poison the upper 32 bits of the source
 * register so a JIT that fails to ignore them produces a visibly wrong
 * result; BPF_DW operations use all 64 bits, so no poison is applied.
 */
#define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
/* Template 1: verify the memory result of a single atomic op.
 * Stores "old" at r10-40, applies the atomic op with "update", then
 * loads the slot back and folds the two 32-bit halves together with
 * OR so any stray upper bits also fail the comparison against
 * "result".
 */
#define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
{									\
	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
	#old " " #logic " " #update " = " #result,			\
	.u.insns_int = {						\
		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
		BPF_ST_MEM(width, R10, -40, old),			\
		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
		BPF_LDX_MEM(width, R0, R10, -40),			\
		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
		BPF_ALU64_REG(BPF_OR, R0, R1),				\
		BPF_EXIT_INSN(),					\
	},								\
	INTERNAL,							\
	{ },								\
	{ { 0, result } },						\
	.stack_depth = 40,						\
}
  8011. #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
  8012. { \
  8013. "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
  8014. #old " " #logic " " #update " = " #result, \
  8015. .u.insns_int = { \
  8016. BPF_ALU64_REG(BPF_MOV, R1, R10), \
  8017. BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
  8018. BPF_ST_MEM(BPF_W, R10, -40, old), \
  8019. BPF_ATOMIC_OP(width, op, R10, R0, -40), \
  8020. BPF_ALU64_REG(BPF_MOV, R0, R10), \
  8021. BPF_ALU64_REG(BPF_SUB, R0, R1), \
  8022. BPF_ALU64_REG(BPF_MOV, R1, R0), \
  8023. BPF_ALU64_IMM(BPF_RSH, R1, 32), \
  8024. BPF_ALU64_REG(BPF_OR, R0, R1), \
  8025. BPF_EXIT_INSN(), \
  8026. }, \
  8027. INTERNAL, \
  8028. { }, \
  8029. { { 0, 0 } }, \
  8030. .stack_depth = 40, \
  8031. }
/*
 * Template 3: same frame-pointer side-effect check as TEST2, but with a
 * different register assignment — R0 holds the R10 snapshot and R1 is
 * the atomic source operand. After the op, R0 - R10 must be zero; the
 * high half of the difference is folded into the low half as in TEST1.
 */
#define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result)	\
{									\
	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: "	\
	#old " " #logic " " #update " = " #result,			\
	.u.insns_int = {						\
		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
		BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)),	\
		BPF_ST_MEM(width, R10, -40, old),			\
		BPF_ATOMIC_OP(width, op, R10, R1, -40),			\
		BPF_ALU64_REG(BPF_SUB, R0, R10),			\
		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
		BPF_ALU64_REG(BPF_OR, R0, R1),				\
		BPF_EXIT_INSN(),					\
	},								\
	INTERNAL,							\
	{ },								\
	{ { 0, 0 } },							\
	.stack_depth = 40,						\
}
/*
 * Template 4: verify fetch semantics on the source register. Expected
 * value of R3 after the op: the previous memory contents (old) when the
 * op carries BPF_FETCH, otherwise R3 is left unchanged (update). The
 * 32-bit BPF_ALU32_REG move into R0 truncates the result, discarding
 * the BPF_ATOMIC_POISON bits in the BPF_W non-fetch case.
 */
#define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result)	\
{									\
	"BPF_ATOMIC | " #width ", " #op ": Test fetch: "		\
	#old " " #logic " " #update " = " #result,			\
	.u.insns_int = {						\
		BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)),	\
		BPF_ST_MEM(width, R10, -40, old),			\
		BPF_ATOMIC_OP(width, op, R10, R3, -40),			\
		BPF_ALU32_REG(BPF_MOV, R0, R3),				\
		BPF_EXIT_INSN(),					\
	},								\
	INTERNAL,							\
	{ },								\
	{ { 0, (op) & BPF_FETCH ? old : update } },			\
	.stack_depth = 40,						\
}
  8068. /* BPF_ATOMIC | BPF_W: BPF_ADD */
  8069. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8070. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8071. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8072. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8073. /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
  8074. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8075. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8076. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8077. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8078. /* BPF_ATOMIC | BPF_DW: BPF_ADD */
  8079. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8080. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8081. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8082. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
  8083. /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
  8084. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8085. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8086. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8087. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
  8088. /* BPF_ATOMIC | BPF_W: BPF_AND */
  8089. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
  8090. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
  8091. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
  8092. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
  8093. /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
  8094. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8095. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8096. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8097. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8098. /* BPF_ATOMIC | BPF_DW: BPF_AND */
  8099. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
  8100. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
  8101. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
  8102. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
  8103. /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
  8104. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8105. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8106. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8107. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
  8108. /* BPF_ATOMIC | BPF_W: BPF_OR */
  8109. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
  8110. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
  8111. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
  8112. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
  8113. /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
  8114. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8115. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8116. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8117. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8118. /* BPF_ATOMIC | BPF_DW: BPF_OR */
  8119. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
  8120. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
  8121. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
  8122. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
  8123. /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
  8124. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8125. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8126. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8127. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
  8128. /* BPF_ATOMIC | BPF_W: BPF_XOR */
  8129. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8130. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8131. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8132. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8133. /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
  8134. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8135. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8136. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8137. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8138. /* BPF_ATOMIC | BPF_DW: BPF_XOR */
  8139. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8140. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8141. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8142. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
  8143. /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
  8144. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8145. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8146. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8147. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
  8148. /* BPF_ATOMIC | BPF_W: BPF_XCHG */
  8149. BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8150. BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8151. BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8152. BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8153. /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
  8154. BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8155. BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8156. BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8157. BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
  8158. #undef BPF_ATOMIC_POISON
  8159. #undef BPF_ATOMIC_OP_TEST1
  8160. #undef BPF_ATOMIC_OP_TEST2
  8161. #undef BPF_ATOMIC_OP_TEST3
  8162. #undef BPF_ATOMIC_OP_TEST4
  8163. /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
  8164. {
  8165. "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
  8166. .u.insns_int = {
  8167. BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
  8168. BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
  8169. BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
  8170. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8171. BPF_EXIT_INSN(),
  8172. },
  8173. INTERNAL,
  8174. { },
  8175. { { 0, 0x01234567 } },
  8176. .stack_depth = 40,
  8177. },
  8178. {
  8179. "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
  8180. .u.insns_int = {
  8181. BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
  8182. BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
  8183. BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
  8184. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8185. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  8186. BPF_EXIT_INSN(),
  8187. },
  8188. INTERNAL,
  8189. { },
  8190. { { 0, 0x89abcdef } },
  8191. .stack_depth = 40,
  8192. },
  8193. {
  8194. "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
  8195. .u.insns_int = {
  8196. BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
  8197. BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
  8198. BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
  8199. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8200. BPF_EXIT_INSN(),
  8201. },
  8202. INTERNAL,
  8203. { },
  8204. { { 0, 0x01234567 } },
  8205. .stack_depth = 40,
  8206. },
  8207. {
  8208. "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
  8209. .u.insns_int = {
  8210. BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
  8211. BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
  8212. BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
  8213. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8214. BPF_LDX_MEM(BPF_W, R0, R10, -40),
  8215. BPF_EXIT_INSN(),
  8216. },
  8217. INTERNAL,
  8218. { },
  8219. { { 0, 0x01234567 } },
  8220. .stack_depth = 40,
  8221. },
  8222. {
  8223. "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
  8224. .u.insns_int = {
  8225. BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
  8226. BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
  8227. BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
  8228. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8229. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
  8230. BPF_ALU32_REG(BPF_MOV, R0, R3),
  8231. BPF_EXIT_INSN(),
  8232. },
  8233. INTERNAL,
  8234. { },
  8235. { { 0, 0x89abcdef } },
  8236. .stack_depth = 40,
  8237. },
  8238. /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
  8239. {
  8240. "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
  8241. .u.insns_int = {
  8242. BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
  8243. BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
  8244. BPF_ALU64_REG(BPF_MOV, R0, R1),
  8245. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  8246. BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
  8247. BPF_JMP_REG(BPF_JNE, R0, R1, 1),
  8248. BPF_ALU64_REG(BPF_SUB, R0, R1),
  8249. BPF_EXIT_INSN(),
  8250. },
  8251. INTERNAL,
  8252. { },
  8253. { { 0, 0 } },
  8254. .stack_depth = 40,
  8255. },
  8256. {
  8257. "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
  8258. .u.insns_int = {
  8259. BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
  8260. BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
  8261. BPF_ALU64_REG(BPF_MOV, R0, R1),
  8262. BPF_STX_MEM(BPF_DW, R10, R0, -40),
  8263. BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
  8264. BPF_LDX_MEM(BPF_DW, R0, R10, -40),
  8265. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  8266. BPF_ALU64_REG(BPF_SUB, R0, R2),
  8267. BPF_EXIT_INSN(),
  8268. },
  8269. INTERNAL,
  8270. { },
  8271. { { 0, 0 } },
  8272. .stack_depth = 40,
  8273. },
  8274. {
  8275. "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
  8276. .u.insns_int = {
  8277. BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
  8278. BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
  8279. BPF_ALU64_REG(BPF_MOV, R0, R1),
  8280. BPF_ALU64_IMM(BPF_ADD, R0, 1),
  8281. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  8282. BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
  8283. BPF_JMP_REG(BPF_JNE, R0, R1, 1),
  8284. BPF_ALU64_REG(BPF_SUB, R0, R1),
  8285. BPF_EXIT_INSN(),
  8286. },
  8287. INTERNAL,
  8288. { },
  8289. { { 0, 0 } },
  8290. .stack_depth = 40,
  8291. },
  8292. {
  8293. "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
  8294. .u.insns_int = {
  8295. BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
  8296. BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
  8297. BPF_ALU64_REG(BPF_MOV, R0, R1),
  8298. BPF_ALU64_IMM(BPF_ADD, R0, 1),
  8299. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  8300. BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
  8301. BPF_LDX_MEM(BPF_DW, R0, R10, -40),
  8302. BPF_JMP_REG(BPF_JNE, R0, R1, 1),
  8303. BPF_ALU64_REG(BPF_SUB, R0, R1),
  8304. BPF_EXIT_INSN(),
  8305. },
  8306. INTERNAL,
  8307. { },
  8308. { { 0, 0 } },
  8309. .stack_depth = 40,
  8310. },
  8311. {
  8312. "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
  8313. .u.insns_int = {
  8314. BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
  8315. BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
  8316. BPF_ALU64_REG(BPF_MOV, R0, R1),
  8317. BPF_STX_MEM(BPF_DW, R10, R1, -40),
  8318. BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
  8319. BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
  8320. BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  8321. BPF_ALU64_REG(BPF_SUB, R0, R2),
  8322. BPF_EXIT_INSN(),
  8323. },
  8324. INTERNAL,
  8325. { },
  8326. { { 0, 0 } },
  8327. .stack_depth = 40,
  8328. },
  8329. /* BPF_JMP32 | BPF_JEQ | BPF_K */
  8330. {
  8331. "JMP32_JEQ_K: Small immediate",
  8332. .u.insns_int = {
  8333. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8334. BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
  8335. BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
  8336. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8337. BPF_EXIT_INSN(),
  8338. },
  8339. INTERNAL,
  8340. { },
  8341. { { 0, 123 } }
  8342. },
  8343. {
  8344. "JMP32_JEQ_K: Large immediate",
  8345. .u.insns_int = {
  8346. BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
  8347. BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
  8348. BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
  8349. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8350. BPF_EXIT_INSN(),
  8351. },
  8352. INTERNAL,
  8353. { },
  8354. { { 0, 12345678 } }
  8355. },
  8356. {
  8357. "JMP32_JEQ_K: negative immediate",
  8358. .u.insns_int = {
  8359. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8360. BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
  8361. BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
  8362. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8363. BPF_EXIT_INSN(),
  8364. },
  8365. INTERNAL,
  8366. { },
  8367. { { 0, -123 } }
  8368. },
  8369. /* BPF_JMP32 | BPF_JEQ | BPF_X */
  8370. {
  8371. "JMP32_JEQ_X",
  8372. .u.insns_int = {
  8373. BPF_ALU32_IMM(BPF_MOV, R0, 1234),
  8374. BPF_ALU32_IMM(BPF_MOV, R1, 4321),
  8375. BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
  8376. BPF_ALU32_IMM(BPF_MOV, R1, 1234),
  8377. BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
  8378. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8379. BPF_EXIT_INSN(),
  8380. },
  8381. INTERNAL,
  8382. { },
  8383. { { 0, 1234 } }
  8384. },
  8385. /* BPF_JMP32 | BPF_JNE | BPF_K */
  8386. {
  8387. "JMP32_JNE_K: Small immediate",
  8388. .u.insns_int = {
  8389. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8390. BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
  8391. BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
  8392. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8393. BPF_EXIT_INSN(),
  8394. },
  8395. INTERNAL,
  8396. { },
  8397. { { 0, 123 } }
  8398. },
  8399. {
  8400. "JMP32_JNE_K: Large immediate",
  8401. .u.insns_int = {
  8402. BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
  8403. BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
  8404. BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
  8405. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8406. BPF_EXIT_INSN(),
  8407. },
  8408. INTERNAL,
  8409. { },
  8410. { { 0, 12345678 } }
  8411. },
  8412. {
  8413. "JMP32_JNE_K: negative immediate",
  8414. .u.insns_int = {
  8415. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8416. BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
  8417. BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
  8418. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8419. BPF_EXIT_INSN(),
  8420. },
  8421. INTERNAL,
  8422. { },
  8423. { { 0, -123 } }
  8424. },
  8425. /* BPF_JMP32 | BPF_JNE | BPF_X */
  8426. {
  8427. "JMP32_JNE_X",
  8428. .u.insns_int = {
  8429. BPF_ALU32_IMM(BPF_MOV, R0, 1234),
  8430. BPF_ALU32_IMM(BPF_MOV, R1, 1234),
  8431. BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
  8432. BPF_ALU32_IMM(BPF_MOV, R1, 4321),
  8433. BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
  8434. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8435. BPF_EXIT_INSN(),
  8436. },
  8437. INTERNAL,
  8438. { },
  8439. { { 0, 1234 } }
  8440. },
  8441. /* BPF_JMP32 | BPF_JSET | BPF_K */
  8442. {
  8443. "JMP32_JSET_K: Small immediate",
  8444. .u.insns_int = {
  8445. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8446. BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
  8447. BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
  8448. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8449. BPF_EXIT_INSN(),
  8450. },
  8451. INTERNAL,
  8452. { },
  8453. { { 0, 1 } }
  8454. },
  8455. {
  8456. "JMP32_JSET_K: Large immediate",
  8457. .u.insns_int = {
  8458. BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
  8459. BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
  8460. BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
  8461. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8462. BPF_EXIT_INSN(),
  8463. },
  8464. INTERNAL,
  8465. { },
  8466. { { 0, 0x40000000 } }
  8467. },
  8468. {
  8469. "JMP32_JSET_K: negative immediate",
  8470. .u.insns_int = {
  8471. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8472. BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
  8473. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8474. BPF_EXIT_INSN(),
  8475. },
  8476. INTERNAL,
  8477. { },
  8478. { { 0, -123 } }
  8479. },
  8480. /* BPF_JMP32 | BPF_JSET | BPF_X */
  8481. {
  8482. "JMP32_JSET_X",
  8483. .u.insns_int = {
  8484. BPF_ALU32_IMM(BPF_MOV, R0, 8),
  8485. BPF_ALU32_IMM(BPF_MOV, R1, 7),
  8486. BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
  8487. BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
  8488. BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
  8489. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8490. BPF_EXIT_INSN(),
  8491. },
  8492. INTERNAL,
  8493. { },
  8494. { { 0, 8 } }
  8495. },
  8496. /* BPF_JMP32 | BPF_JGT | BPF_K */
  8497. {
  8498. "JMP32_JGT_K: Small immediate",
  8499. .u.insns_int = {
  8500. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8501. BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
  8502. BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
  8503. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8504. BPF_EXIT_INSN(),
  8505. },
  8506. INTERNAL,
  8507. { },
  8508. { { 0, 123 } }
  8509. },
  8510. {
  8511. "JMP32_JGT_K: Large immediate",
  8512. .u.insns_int = {
  8513. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8514. BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
  8515. BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
  8516. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8517. BPF_EXIT_INSN(),
  8518. },
  8519. INTERNAL,
  8520. { },
  8521. { { 0, 0xfffffffe } }
  8522. },
  8523. /* BPF_JMP32 | BPF_JGT | BPF_X */
  8524. {
  8525. "JMP32_JGT_X",
  8526. .u.insns_int = {
  8527. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8528. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  8529. BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
  8530. BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
  8531. BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
  8532. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8533. BPF_EXIT_INSN(),
  8534. },
  8535. INTERNAL,
  8536. { },
  8537. { { 0, 0xfffffffe } }
  8538. },
  8539. /* BPF_JMP32 | BPF_JGE | BPF_K */
  8540. {
  8541. "JMP32_JGE_K: Small immediate",
  8542. .u.insns_int = {
  8543. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8544. BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
  8545. BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
  8546. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8547. BPF_EXIT_INSN(),
  8548. },
  8549. INTERNAL,
  8550. { },
  8551. { { 0, 123 } }
  8552. },
  8553. {
  8554. "JMP32_JGE_K: Large immediate",
  8555. .u.insns_int = {
  8556. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8557. BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
  8558. BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
  8559. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8560. BPF_EXIT_INSN(),
  8561. },
  8562. INTERNAL,
  8563. { },
  8564. { { 0, 0xfffffffe } }
  8565. },
  8566. /* BPF_JMP32 | BPF_JGE | BPF_X */
  8567. {
  8568. "JMP32_JGE_X",
  8569. .u.insns_int = {
  8570. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8571. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  8572. BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
  8573. BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
  8574. BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
  8575. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8576. BPF_EXIT_INSN(),
  8577. },
  8578. INTERNAL,
  8579. { },
  8580. { { 0, 0xfffffffe } }
  8581. },
  8582. /* BPF_JMP32 | BPF_JLT | BPF_K */
  8583. {
  8584. "JMP32_JLT_K: Small immediate",
  8585. .u.insns_int = {
  8586. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8587. BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
  8588. BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
  8589. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8590. BPF_EXIT_INSN(),
  8591. },
  8592. INTERNAL,
  8593. { },
  8594. { { 0, 123 } }
  8595. },
  8596. {
  8597. "JMP32_JLT_K: Large immediate",
  8598. .u.insns_int = {
  8599. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8600. BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
  8601. BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
  8602. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8603. BPF_EXIT_INSN(),
  8604. },
  8605. INTERNAL,
  8606. { },
  8607. { { 0, 0xfffffffe } }
  8608. },
  8609. /* BPF_JMP32 | BPF_JLT | BPF_X */
  8610. {
  8611. "JMP32_JLT_X",
  8612. .u.insns_int = {
  8613. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8614. BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
  8615. BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
  8616. BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
  8617. BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
  8618. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8619. BPF_EXIT_INSN(),
  8620. },
  8621. INTERNAL,
  8622. { },
  8623. { { 0, 0xfffffffe } }
  8624. },
  8625. /* BPF_JMP32 | BPF_JLE | BPF_K */
  8626. {
  8627. "JMP32_JLE_K: Small immediate",
  8628. .u.insns_int = {
  8629. BPF_ALU32_IMM(BPF_MOV, R0, 123),
  8630. BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
  8631. BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
  8632. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8633. BPF_EXIT_INSN(),
  8634. },
  8635. INTERNAL,
  8636. { },
  8637. { { 0, 123 } }
  8638. },
  8639. {
  8640. "JMP32_JLE_K: Large immediate",
  8641. .u.insns_int = {
  8642. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8643. BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
  8644. BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
  8645. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8646. BPF_EXIT_INSN(),
  8647. },
  8648. INTERNAL,
  8649. { },
  8650. { { 0, 0xfffffffe } }
  8651. },
  8652. /* BPF_JMP32 | BPF_JLE | BPF_X */
  8653. {
  8654. "JMP32_JLE_X",
  8655. .u.insns_int = {
  8656. BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
  8657. BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
  8658. BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
  8659. BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
  8660. BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
  8661. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8662. BPF_EXIT_INSN(),
  8663. },
  8664. INTERNAL,
  8665. { },
  8666. { { 0, 0xfffffffe } }
  8667. },
  8668. /* BPF_JMP32 | BPF_JSGT | BPF_K */
  8669. {
  8670. "JMP32_JSGT_K: Small immediate",
  8671. .u.insns_int = {
  8672. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8673. BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
  8674. BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
  8675. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8676. BPF_EXIT_INSN(),
  8677. },
  8678. INTERNAL,
  8679. { },
  8680. { { 0, -123 } }
  8681. },
  8682. {
  8683. "JMP32_JSGT_K: Large immediate",
  8684. .u.insns_int = {
  8685. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8686. BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
  8687. BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
  8688. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8689. BPF_EXIT_INSN(),
  8690. },
  8691. INTERNAL,
  8692. { },
  8693. { { 0, -12345678 } }
  8694. },
  8695. /* BPF_JMP32 | BPF_JSGT | BPF_X */
  8696. {
  8697. "JMP32_JSGT_X",
  8698. .u.insns_int = {
  8699. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8700. BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
  8701. BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
  8702. BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
  8703. BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
  8704. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8705. BPF_EXIT_INSN(),
  8706. },
  8707. INTERNAL,
  8708. { },
  8709. { { 0, -12345678 } }
  8710. },
  8711. /* BPF_JMP32 | BPF_JSGE | BPF_K */
  8712. {
  8713. "JMP32_JSGE_K: Small immediate",
  8714. .u.insns_int = {
  8715. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8716. BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
  8717. BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
  8718. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8719. BPF_EXIT_INSN(),
  8720. },
  8721. INTERNAL,
  8722. { },
  8723. { { 0, -123 } }
  8724. },
  8725. {
  8726. "JMP32_JSGE_K: Large immediate",
  8727. .u.insns_int = {
  8728. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8729. BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
  8730. BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
  8731. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8732. BPF_EXIT_INSN(),
  8733. },
  8734. INTERNAL,
  8735. { },
  8736. { { 0, -12345678 } }
  8737. },
  8738. /* BPF_JMP32 | BPF_JSGE | BPF_X */
  8739. {
  8740. "JMP32_JSGE_X",
  8741. .u.insns_int = {
  8742. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8743. BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
  8744. BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
  8745. BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
  8746. BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
  8747. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8748. BPF_EXIT_INSN(),
  8749. },
  8750. INTERNAL,
  8751. { },
  8752. { { 0, -12345678 } }
  8753. },
  8754. /* BPF_JMP32 | BPF_JSLT | BPF_K */
  8755. {
  8756. "JMP32_JSLT_K: Small immediate",
  8757. .u.insns_int = {
  8758. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8759. BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
  8760. BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
  8761. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8762. BPF_EXIT_INSN(),
  8763. },
  8764. INTERNAL,
  8765. { },
  8766. { { 0, -123 } }
  8767. },
  8768. {
  8769. "JMP32_JSLT_K: Large immediate",
  8770. .u.insns_int = {
  8771. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8772. BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
  8773. BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
  8774. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8775. BPF_EXIT_INSN(),
  8776. },
  8777. INTERNAL,
  8778. { },
  8779. { { 0, -12345678 } }
  8780. },
  8781. /* BPF_JMP32 | BPF_JSLT | BPF_X */
  8782. {
  8783. "JMP32_JSLT_X",
  8784. .u.insns_int = {
  8785. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8786. BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
  8787. BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
  8788. BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
  8789. BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
  8790. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8791. BPF_EXIT_INSN(),
  8792. },
  8793. INTERNAL,
  8794. { },
  8795. { { 0, -12345678 } }
  8796. },
  8797. /* BPF_JMP32 | BPF_JSLE | BPF_K */
  8798. {
  8799. "JMP32_JSLE_K: Small immediate",
  8800. .u.insns_int = {
  8801. BPF_ALU32_IMM(BPF_MOV, R0, -123),
  8802. BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
  8803. BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
  8804. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8805. BPF_EXIT_INSN(),
  8806. },
  8807. INTERNAL,
  8808. { },
  8809. { { 0, -123 } }
  8810. },
  8811. {
  8812. "JMP32_JSLE_K: Large immediate",
  8813. .u.insns_int = {
  8814. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8815. BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
  8816. BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
  8817. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8818. BPF_EXIT_INSN(),
  8819. },
  8820. INTERNAL,
  8821. { },
  8822. { { 0, -12345678 } }
  8823. },
/* BPF_JMP32 | BPF_JSLE | BPF_X */
  8825. {
  8826. "JMP32_JSLE_X",
  8827. .u.insns_int = {
  8828. BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
  8829. BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
  8830. BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
  8831. BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
  8832. BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
  8833. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8834. BPF_EXIT_INSN(),
  8835. },
  8836. INTERNAL,
  8837. { },
  8838. { { 0, -12345678 } }
  8839. },
  8840. /* BPF_JMP | BPF_EXIT */
  8841. {
  8842. "JMP_EXIT",
  8843. .u.insns_int = {
  8844. BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
  8845. BPF_EXIT_INSN(),
  8846. BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
  8847. },
  8848. INTERNAL,
  8849. { },
  8850. { { 0, 0x4711 } },
  8851. },
  8852. /* BPF_JMP | BPF_JA */
  8853. {
  8854. "JMP_JA: Unconditional jump: if (true) return 1",
  8855. .u.insns_int = {
  8856. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8857. BPF_JMP_IMM(BPF_JA, 0, 0, 1),
  8858. BPF_EXIT_INSN(),
  8859. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8860. BPF_EXIT_INSN(),
  8861. },
  8862. INTERNAL,
  8863. { },
  8864. { { 0, 1 } },
  8865. },
  8866. /* BPF_JMP | BPF_JSLT | BPF_K */
  8867. {
  8868. "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
  8869. .u.insns_int = {
  8870. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8871. BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
  8872. BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
  8873. BPF_EXIT_INSN(),
  8874. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8875. BPF_EXIT_INSN(),
  8876. },
  8877. INTERNAL,
  8878. { },
  8879. { { 0, 1 } },
  8880. },
  8881. {
  8882. "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
  8883. .u.insns_int = {
  8884. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8885. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  8886. BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
  8887. BPF_EXIT_INSN(),
  8888. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8889. BPF_EXIT_INSN(),
  8890. },
  8891. INTERNAL,
  8892. { },
  8893. { { 0, 1 } },
  8894. },
  8895. /* BPF_JMP | BPF_JSGT | BPF_K */
  8896. {
  8897. "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
  8898. .u.insns_int = {
  8899. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8900. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  8901. BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
  8902. BPF_EXIT_INSN(),
  8903. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8904. BPF_EXIT_INSN(),
  8905. },
  8906. INTERNAL,
  8907. { },
  8908. { { 0, 1 } },
  8909. },
  8910. {
  8911. "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
  8912. .u.insns_int = {
  8913. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8914. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  8915. BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
  8916. BPF_EXIT_INSN(),
  8917. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8918. BPF_EXIT_INSN(),
  8919. },
  8920. INTERNAL,
  8921. { },
  8922. { { 0, 1 } },
  8923. },
  8924. /* BPF_JMP | BPF_JSLE | BPF_K */
  8925. {
  8926. "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
  8927. .u.insns_int = {
  8928. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8929. BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
  8930. BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
  8931. BPF_EXIT_INSN(),
  8932. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8933. BPF_EXIT_INSN(),
  8934. },
  8935. INTERNAL,
  8936. { },
  8937. { { 0, 1 } },
  8938. },
  8939. {
  8940. "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
  8941. .u.insns_int = {
  8942. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8943. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  8944. BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
  8945. BPF_EXIT_INSN(),
  8946. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  8947. BPF_EXIT_INSN(),
  8948. },
  8949. INTERNAL,
  8950. { },
  8951. { { 0, 1 } },
  8952. },
  8953. {
  8954. "JMP_JSLE_K: Signed jump: value walk 1",
  8955. .u.insns_int = {
  8956. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8957. BPF_LD_IMM64(R1, 3),
  8958. BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
  8959. BPF_ALU64_IMM(BPF_SUB, R1, 1),
  8960. BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
  8961. BPF_ALU64_IMM(BPF_SUB, R1, 1),
  8962. BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
  8963. BPF_ALU64_IMM(BPF_SUB, R1, 1),
  8964. BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
  8965. BPF_EXIT_INSN(), /* bad exit */
  8966. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
  8967. BPF_EXIT_INSN(),
  8968. },
  8969. INTERNAL,
  8970. { },
  8971. { { 0, 1 } },
  8972. },
  8973. {
  8974. "JMP_JSLE_K: Signed jump: value walk 2",
  8975. .u.insns_int = {
  8976. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8977. BPF_LD_IMM64(R1, 3),
  8978. BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
  8979. BPF_ALU64_IMM(BPF_SUB, R1, 2),
  8980. BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
  8981. BPF_ALU64_IMM(BPF_SUB, R1, 2),
  8982. BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
  8983. BPF_EXIT_INSN(), /* bad exit */
  8984. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
  8985. BPF_EXIT_INSN(),
  8986. },
  8987. INTERNAL,
  8988. { },
  8989. { { 0, 1 } },
  8990. },
  8991. /* BPF_JMP | BPF_JSGE | BPF_K */
  8992. {
  8993. "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
  8994. .u.insns_int = {
  8995. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  8996. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  8997. BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
  8998. BPF_EXIT_INSN(),
  8999. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9000. BPF_EXIT_INSN(),
  9001. },
  9002. INTERNAL,
  9003. { },
  9004. { { 0, 1 } },
  9005. },
  9006. {
  9007. "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
  9008. .u.insns_int = {
  9009. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9010. BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
  9011. BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
  9012. BPF_EXIT_INSN(),
  9013. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9014. BPF_EXIT_INSN(),
  9015. },
  9016. INTERNAL,
  9017. { },
  9018. { { 0, 1 } },
  9019. },
  9020. {
  9021. "JMP_JSGE_K: Signed jump: value walk 1",
  9022. .u.insns_int = {
  9023. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9024. BPF_LD_IMM64(R1, -3),
  9025. BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
  9026. BPF_ALU64_IMM(BPF_ADD, R1, 1),
  9027. BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
  9028. BPF_ALU64_IMM(BPF_ADD, R1, 1),
  9029. BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
  9030. BPF_ALU64_IMM(BPF_ADD, R1, 1),
  9031. BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
  9032. BPF_EXIT_INSN(), /* bad exit */
  9033. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
  9034. BPF_EXIT_INSN(),
  9035. },
  9036. INTERNAL,
  9037. { },
  9038. { { 0, 1 } },
  9039. },
  9040. {
  9041. "JMP_JSGE_K: Signed jump: value walk 2",
  9042. .u.insns_int = {
  9043. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9044. BPF_LD_IMM64(R1, -3),
  9045. BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
  9046. BPF_ALU64_IMM(BPF_ADD, R1, 2),
  9047. BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
  9048. BPF_ALU64_IMM(BPF_ADD, R1, 2),
  9049. BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
  9050. BPF_EXIT_INSN(), /* bad exit */
  9051. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
  9052. BPF_EXIT_INSN(),
  9053. },
  9054. INTERNAL,
  9055. { },
  9056. { { 0, 1 } },
  9057. },
  9058. /* BPF_JMP | BPF_JGT | BPF_K */
  9059. {
  9060. "JMP_JGT_K: if (3 > 2) return 1",
  9061. .u.insns_int = {
  9062. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9063. BPF_LD_IMM64(R1, 3),
  9064. BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
  9065. BPF_EXIT_INSN(),
  9066. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9067. BPF_EXIT_INSN(),
  9068. },
  9069. INTERNAL,
  9070. { },
  9071. { { 0, 1 } },
  9072. },
  9073. {
  9074. "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
  9075. .u.insns_int = {
  9076. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9077. BPF_LD_IMM64(R1, -1),
  9078. BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
  9079. BPF_EXIT_INSN(),
  9080. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9081. BPF_EXIT_INSN(),
  9082. },
  9083. INTERNAL,
  9084. { },
  9085. { { 0, 1 } },
  9086. },
  9087. /* BPF_JMP | BPF_JLT | BPF_K */
  9088. {
  9089. "JMP_JLT_K: if (2 < 3) return 1",
  9090. .u.insns_int = {
  9091. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9092. BPF_LD_IMM64(R1, 2),
  9093. BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
  9094. BPF_EXIT_INSN(),
  9095. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9096. BPF_EXIT_INSN(),
  9097. },
  9098. INTERNAL,
  9099. { },
  9100. { { 0, 1 } },
  9101. },
  9102. {
  9103. "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
  9104. .u.insns_int = {
  9105. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9106. BPF_LD_IMM64(R1, 1),
  9107. BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
  9108. BPF_EXIT_INSN(),
  9109. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9110. BPF_EXIT_INSN(),
  9111. },
  9112. INTERNAL,
  9113. { },
  9114. { { 0, 1 } },
  9115. },
  9116. /* BPF_JMP | BPF_JGE | BPF_K */
  9117. {
  9118. "JMP_JGE_K: if (3 >= 2) return 1",
  9119. .u.insns_int = {
  9120. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9121. BPF_LD_IMM64(R1, 3),
  9122. BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
  9123. BPF_EXIT_INSN(),
  9124. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9125. BPF_EXIT_INSN(),
  9126. },
  9127. INTERNAL,
  9128. { },
  9129. { { 0, 1 } },
  9130. },
  9131. /* BPF_JMP | BPF_JLE | BPF_K */
  9132. {
  9133. "JMP_JLE_K: if (2 <= 3) return 1",
  9134. .u.insns_int = {
  9135. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9136. BPF_LD_IMM64(R1, 2),
  9137. BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
  9138. BPF_EXIT_INSN(),
  9139. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9140. BPF_EXIT_INSN(),
  9141. },
  9142. INTERNAL,
  9143. { },
  9144. { { 0, 1 } },
  9145. },
  9146. /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
  9147. {
  9148. "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
  9149. .u.insns_int = {
  9150. BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
  9151. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
  9152. BPF_EXIT_INSN(),
  9153. BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
  9154. BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
  9155. BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
  9156. BPF_EXIT_INSN(),
  9157. },
  9158. INTERNAL,
  9159. { },
  9160. { { 0, 1 } },
  9161. },
  9162. {
  9163. "JMP_JGE_K: if (3 >= 3) return 1",
  9164. .u.insns_int = {
  9165. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9166. BPF_LD_IMM64(R1, 3),
  9167. BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
  9168. BPF_EXIT_INSN(),
  9169. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9170. BPF_EXIT_INSN(),
  9171. },
  9172. INTERNAL,
  9173. { },
  9174. { { 0, 1 } },
  9175. },
  9176. /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
  9177. {
  9178. "JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
  9179. .u.insns_int = {
  9180. BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
  9181. BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
  9182. BPF_EXIT_INSN(),
  9183. BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
  9184. BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
  9185. BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
  9186. BPF_EXIT_INSN(),
  9187. },
  9188. INTERNAL,
  9189. { },
  9190. { { 0, 1 } },
  9191. },
  9192. {
  9193. "JMP_JLE_K: if (3 <= 3) return 1",
  9194. .u.insns_int = {
  9195. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9196. BPF_LD_IMM64(R1, 3),
  9197. BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
  9198. BPF_EXIT_INSN(),
  9199. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9200. BPF_EXIT_INSN(),
  9201. },
  9202. INTERNAL,
  9203. { },
  9204. { { 0, 1 } },
  9205. },
  9206. /* BPF_JMP | BPF_JNE | BPF_K */
  9207. {
  9208. "JMP_JNE_K: if (3 != 2) return 1",
  9209. .u.insns_int = {
  9210. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9211. BPF_LD_IMM64(R1, 3),
  9212. BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
  9213. BPF_EXIT_INSN(),
  9214. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9215. BPF_EXIT_INSN(),
  9216. },
  9217. INTERNAL,
  9218. { },
  9219. { { 0, 1 } },
  9220. },
  9221. /* BPF_JMP | BPF_JEQ | BPF_K */
  9222. {
  9223. "JMP_JEQ_K: if (3 == 3) return 1",
  9224. .u.insns_int = {
  9225. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9226. BPF_LD_IMM64(R1, 3),
  9227. BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
  9228. BPF_EXIT_INSN(),
  9229. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9230. BPF_EXIT_INSN(),
  9231. },
  9232. INTERNAL,
  9233. { },
  9234. { { 0, 1 } },
  9235. },
  9236. /* BPF_JMP | BPF_JSET | BPF_K */
  9237. {
  9238. "JMP_JSET_K: if (0x3 & 0x2) return 1",
  9239. .u.insns_int = {
  9240. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9241. BPF_LD_IMM64(R1, 3),
  9242. BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
  9243. BPF_EXIT_INSN(),
  9244. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9245. BPF_EXIT_INSN(),
  9246. },
  9247. INTERNAL,
  9248. { },
  9249. { { 0, 1 } },
  9250. },
  9251. {
  9252. "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
  9253. .u.insns_int = {
  9254. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9255. BPF_LD_IMM64(R1, 3),
  9256. BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
  9257. BPF_EXIT_INSN(),
  9258. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9259. BPF_EXIT_INSN(),
  9260. },
  9261. INTERNAL,
  9262. { },
  9263. { { 0, 1 } },
  9264. },
  9265. /* BPF_JMP | BPF_JSGT | BPF_X */
  9266. {
  9267. "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
  9268. .u.insns_int = {
  9269. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9270. BPF_LD_IMM64(R1, -1),
  9271. BPF_LD_IMM64(R2, -2),
  9272. BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
  9273. BPF_EXIT_INSN(),
  9274. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9275. BPF_EXIT_INSN(),
  9276. },
  9277. INTERNAL,
  9278. { },
  9279. { { 0, 1 } },
  9280. },
  9281. {
  9282. "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
  9283. .u.insns_int = {
  9284. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9285. BPF_LD_IMM64(R1, -1),
  9286. BPF_LD_IMM64(R2, -1),
  9287. BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
  9288. BPF_EXIT_INSN(),
  9289. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9290. BPF_EXIT_INSN(),
  9291. },
  9292. INTERNAL,
  9293. { },
  9294. { { 0, 1 } },
  9295. },
  9296. /* BPF_JMP | BPF_JSLT | BPF_X */
  9297. {
  9298. "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
  9299. .u.insns_int = {
  9300. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9301. BPF_LD_IMM64(R1, -1),
  9302. BPF_LD_IMM64(R2, -2),
  9303. BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
  9304. BPF_EXIT_INSN(),
  9305. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9306. BPF_EXIT_INSN(),
  9307. },
  9308. INTERNAL,
  9309. { },
  9310. { { 0, 1 } },
  9311. },
  9312. {
  9313. "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
  9314. .u.insns_int = {
  9315. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9316. BPF_LD_IMM64(R1, -1),
  9317. BPF_LD_IMM64(R2, -1),
  9318. BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
  9319. BPF_EXIT_INSN(),
  9320. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9321. BPF_EXIT_INSN(),
  9322. },
  9323. INTERNAL,
  9324. { },
  9325. { { 0, 1 } },
  9326. },
  9327. /* BPF_JMP | BPF_JSGE | BPF_X */
  9328. {
  9329. "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
  9330. .u.insns_int = {
  9331. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9332. BPF_LD_IMM64(R1, -1),
  9333. BPF_LD_IMM64(R2, -2),
  9334. BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
  9335. BPF_EXIT_INSN(),
  9336. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9337. BPF_EXIT_INSN(),
  9338. },
  9339. INTERNAL,
  9340. { },
  9341. { { 0, 1 } },
  9342. },
  9343. {
  9344. "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
  9345. .u.insns_int = {
  9346. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9347. BPF_LD_IMM64(R1, -1),
  9348. BPF_LD_IMM64(R2, -1),
  9349. BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
  9350. BPF_EXIT_INSN(),
  9351. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9352. BPF_EXIT_INSN(),
  9353. },
  9354. INTERNAL,
  9355. { },
  9356. { { 0, 1 } },
  9357. },
  9358. /* BPF_JMP | BPF_JSLE | BPF_X */
  9359. {
  9360. "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
  9361. .u.insns_int = {
  9362. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9363. BPF_LD_IMM64(R1, -1),
  9364. BPF_LD_IMM64(R2, -2),
  9365. BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
  9366. BPF_EXIT_INSN(),
  9367. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9368. BPF_EXIT_INSN(),
  9369. },
  9370. INTERNAL,
  9371. { },
  9372. { { 0, 1 } },
  9373. },
  9374. {
  9375. "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
  9376. .u.insns_int = {
  9377. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9378. BPF_LD_IMM64(R1, -1),
  9379. BPF_LD_IMM64(R2, -1),
  9380. BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
  9381. BPF_EXIT_INSN(),
  9382. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9383. BPF_EXIT_INSN(),
  9384. },
  9385. INTERNAL,
  9386. { },
  9387. { { 0, 1 } },
  9388. },
  9389. /* BPF_JMP | BPF_JGT | BPF_X */
  9390. {
  9391. "JMP_JGT_X: if (3 > 2) return 1",
  9392. .u.insns_int = {
  9393. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9394. BPF_LD_IMM64(R1, 3),
  9395. BPF_LD_IMM64(R2, 2),
  9396. BPF_JMP_REG(BPF_JGT, R1, R2, 1),
  9397. BPF_EXIT_INSN(),
  9398. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9399. BPF_EXIT_INSN(),
  9400. },
  9401. INTERNAL,
  9402. { },
  9403. { { 0, 1 } },
  9404. },
  9405. {
  9406. "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
  9407. .u.insns_int = {
  9408. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9409. BPF_LD_IMM64(R1, -1),
  9410. BPF_LD_IMM64(R2, 1),
  9411. BPF_JMP_REG(BPF_JGT, R1, R2, 1),
  9412. BPF_EXIT_INSN(),
  9413. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9414. BPF_EXIT_INSN(),
  9415. },
  9416. INTERNAL,
  9417. { },
  9418. { { 0, 1 } },
  9419. },
  9420. /* BPF_JMP | BPF_JLT | BPF_X */
  9421. {
  9422. "JMP_JLT_X: if (2 < 3) return 1",
  9423. .u.insns_int = {
  9424. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9425. BPF_LD_IMM64(R1, 3),
  9426. BPF_LD_IMM64(R2, 2),
  9427. BPF_JMP_REG(BPF_JLT, R2, R1, 1),
  9428. BPF_EXIT_INSN(),
  9429. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9430. BPF_EXIT_INSN(),
  9431. },
  9432. INTERNAL,
  9433. { },
  9434. { { 0, 1 } },
  9435. },
  9436. {
  9437. "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
  9438. .u.insns_int = {
  9439. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9440. BPF_LD_IMM64(R1, -1),
  9441. BPF_LD_IMM64(R2, 1),
  9442. BPF_JMP_REG(BPF_JLT, R2, R1, 1),
  9443. BPF_EXIT_INSN(),
  9444. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9445. BPF_EXIT_INSN(),
  9446. },
  9447. INTERNAL,
  9448. { },
  9449. { { 0, 1 } },
  9450. },
  9451. /* BPF_JMP | BPF_JGE | BPF_X */
  9452. {
  9453. "JMP_JGE_X: if (3 >= 2) return 1",
  9454. .u.insns_int = {
  9455. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9456. BPF_LD_IMM64(R1, 3),
  9457. BPF_LD_IMM64(R2, 2),
  9458. BPF_JMP_REG(BPF_JGE, R1, R2, 1),
  9459. BPF_EXIT_INSN(),
  9460. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9461. BPF_EXIT_INSN(),
  9462. },
  9463. INTERNAL,
  9464. { },
  9465. { { 0, 1 } },
  9466. },
  9467. {
  9468. "JMP_JGE_X: if (3 >= 3) return 1",
  9469. .u.insns_int = {
  9470. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9471. BPF_LD_IMM64(R1, 3),
  9472. BPF_LD_IMM64(R2, 3),
  9473. BPF_JMP_REG(BPF_JGE, R1, R2, 1),
  9474. BPF_EXIT_INSN(),
  9475. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9476. BPF_EXIT_INSN(),
  9477. },
  9478. INTERNAL,
  9479. { },
  9480. { { 0, 1 } },
  9481. },
  9482. /* BPF_JMP | BPF_JLE | BPF_X */
  9483. {
  9484. "JMP_JLE_X: if (2 <= 3) return 1",
  9485. .u.insns_int = {
  9486. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9487. BPF_LD_IMM64(R1, 3),
  9488. BPF_LD_IMM64(R2, 2),
  9489. BPF_JMP_REG(BPF_JLE, R2, R1, 1),
  9490. BPF_EXIT_INSN(),
  9491. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9492. BPF_EXIT_INSN(),
  9493. },
  9494. INTERNAL,
  9495. { },
  9496. { { 0, 1 } },
  9497. },
  9498. {
  9499. "JMP_JLE_X: if (3 <= 3) return 1",
  9500. .u.insns_int = {
  9501. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9502. BPF_LD_IMM64(R1, 3),
  9503. BPF_LD_IMM64(R2, 3),
  9504. BPF_JMP_REG(BPF_JLE, R1, R2, 1),
  9505. BPF_EXIT_INSN(),
  9506. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9507. BPF_EXIT_INSN(),
  9508. },
  9509. INTERNAL,
  9510. { },
  9511. { { 0, 1 } },
  9512. },
  9513. {
  9514. /* Mainly testing JIT + imm64 here. */
  9515. "JMP_JGE_X: ldimm64 test 1",
  9516. .u.insns_int = {
  9517. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9518. BPF_LD_IMM64(R1, 3),
  9519. BPF_LD_IMM64(R2, 2),
  9520. BPF_JMP_REG(BPF_JGE, R1, R2, 2),
  9521. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9522. BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  9523. BPF_EXIT_INSN(),
  9524. },
  9525. INTERNAL,
  9526. { },
  9527. { { 0, 0xeeeeeeeeU } },
  9528. },
  9529. {
  9530. "JMP_JGE_X: ldimm64 test 2",
  9531. .u.insns_int = {
  9532. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9533. BPF_LD_IMM64(R1, 3),
  9534. BPF_LD_IMM64(R2, 2),
  9535. BPF_JMP_REG(BPF_JGE, R1, R2, 0),
  9536. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9537. BPF_EXIT_INSN(),
  9538. },
  9539. INTERNAL,
  9540. { },
  9541. { { 0, 0xffffffffU } },
  9542. },
  9543. {
  9544. "JMP_JGE_X: ldimm64 test 3",
  9545. .u.insns_int = {
  9546. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9547. BPF_LD_IMM64(R1, 3),
  9548. BPF_LD_IMM64(R2, 2),
  9549. BPF_JMP_REG(BPF_JGE, R1, R2, 4),
  9550. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9551. BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  9552. BPF_EXIT_INSN(),
  9553. },
  9554. INTERNAL,
  9555. { },
  9556. { { 0, 1 } },
  9557. },
  9558. {
  9559. "JMP_JLE_X: ldimm64 test 1",
  9560. .u.insns_int = {
  9561. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9562. BPF_LD_IMM64(R1, 3),
  9563. BPF_LD_IMM64(R2, 2),
  9564. BPF_JMP_REG(BPF_JLE, R2, R1, 2),
  9565. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9566. BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  9567. BPF_EXIT_INSN(),
  9568. },
  9569. INTERNAL,
  9570. { },
  9571. { { 0, 0xeeeeeeeeU } },
  9572. },
  9573. {
  9574. "JMP_JLE_X: ldimm64 test 2",
  9575. .u.insns_int = {
  9576. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9577. BPF_LD_IMM64(R1, 3),
  9578. BPF_LD_IMM64(R2, 2),
  9579. BPF_JMP_REG(BPF_JLE, R2, R1, 0),
  9580. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9581. BPF_EXIT_INSN(),
  9582. },
  9583. INTERNAL,
  9584. { },
  9585. { { 0, 0xffffffffU } },
  9586. },
  9587. {
  9588. "JMP_JLE_X: ldimm64 test 3",
  9589. .u.insns_int = {
  9590. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9591. BPF_LD_IMM64(R1, 3),
  9592. BPF_LD_IMM64(R2, 2),
  9593. BPF_JMP_REG(BPF_JLE, R2, R1, 4),
  9594. BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  9595. BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  9596. BPF_EXIT_INSN(),
  9597. },
  9598. INTERNAL,
  9599. { },
  9600. { { 0, 1 } },
  9601. },
  9602. /* BPF_JMP | BPF_JNE | BPF_X */
  9603. {
  9604. "JMP_JNE_X: if (3 != 2) return 1",
  9605. .u.insns_int = {
  9606. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9607. BPF_LD_IMM64(R1, 3),
  9608. BPF_LD_IMM64(R2, 2),
  9609. BPF_JMP_REG(BPF_JNE, R1, R2, 1),
  9610. BPF_EXIT_INSN(),
  9611. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9612. BPF_EXIT_INSN(),
  9613. },
  9614. INTERNAL,
  9615. { },
  9616. { { 0, 1 } },
  9617. },
  9618. /* BPF_JMP | BPF_JEQ | BPF_X */
  9619. {
  9620. "JMP_JEQ_X: if (3 == 3) return 1",
  9621. .u.insns_int = {
  9622. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9623. BPF_LD_IMM64(R1, 3),
  9624. BPF_LD_IMM64(R2, 3),
  9625. BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
  9626. BPF_EXIT_INSN(),
  9627. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9628. BPF_EXIT_INSN(),
  9629. },
  9630. INTERNAL,
  9631. { },
  9632. { { 0, 1 } },
  9633. },
  9634. /* BPF_JMP | BPF_JSET | BPF_X */
  9635. {
  9636. "JMP_JSET_X: if (0x3 & 0x2) return 1",
  9637. .u.insns_int = {
  9638. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9639. BPF_LD_IMM64(R1, 3),
  9640. BPF_LD_IMM64(R2, 2),
  9641. BPF_JMP_REG(BPF_JSET, R1, R2, 1),
  9642. BPF_EXIT_INSN(),
  9643. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9644. BPF_EXIT_INSN(),
  9645. },
  9646. INTERNAL,
  9647. { },
  9648. { { 0, 1 } },
  9649. },
  9650. {
  9651. "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
  9652. .u.insns_int = {
  9653. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  9654. BPF_LD_IMM64(R1, 3),
  9655. BPF_LD_IMM64(R2, 0xffffffff),
  9656. BPF_JMP_REG(BPF_JSET, R1, R2, 1),
  9657. BPF_EXIT_INSN(),
  9658. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  9659. BPF_EXIT_INSN(),
  9660. },
  9661. INTERNAL,
  9662. { },
  9663. { { 0, 1 } },
  9664. },
  9665. {
  9666. "JMP_JA: Jump, gap, jump, ...",
  9667. { },
  9668. CLASSIC | FLAG_NO_DATA,
  9669. { },
  9670. { { 0, 0xababcbac } },
  9671. .fill_helper = bpf_fill_ja,
  9672. },
  9673. { /* Mainly checking JIT here. */
  9674. "BPF_MAXINSNS: Maximum possible literals",
  9675. { },
  9676. CLASSIC | FLAG_NO_DATA,
  9677. { },
  9678. { { 0, 0xffffffff } },
  9679. .fill_helper = bpf_fill_maxinsns1,
  9680. },
  9681. { /* Mainly checking JIT here. */
  9682. "BPF_MAXINSNS: Single literal",
  9683. { },
  9684. CLASSIC | FLAG_NO_DATA,
  9685. { },
  9686. { { 0, 0xfefefefe } },
  9687. .fill_helper = bpf_fill_maxinsns2,
  9688. },
  9689. { /* Mainly checking JIT here. */
  9690. "BPF_MAXINSNS: Run/add until end",
  9691. { },
  9692. CLASSIC | FLAG_NO_DATA,
  9693. { },
  9694. { { 0, 0x947bf368 } },
  9695. .fill_helper = bpf_fill_maxinsns3,
  9696. },
  9697. {
  9698. "BPF_MAXINSNS: Too many instructions",
  9699. { },
  9700. CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
  9701. { },
  9702. { },
  9703. .fill_helper = bpf_fill_maxinsns4,
  9704. .expected_errcode = -EINVAL,
  9705. },
  9706. { /* Mainly checking JIT here. */
  9707. "BPF_MAXINSNS: Very long jump",
  9708. { },
  9709. CLASSIC | FLAG_NO_DATA,
  9710. { },
  9711. { { 0, 0xabababab } },
  9712. .fill_helper = bpf_fill_maxinsns5,
  9713. },
  9714. { /* Mainly checking JIT here. */
  9715. "BPF_MAXINSNS: Ctx heavy transformations",
  9716. { },
  9717. CLASSIC,
  9718. { },
  9719. {
  9720. { 1, SKB_VLAN_PRESENT },
  9721. { 10, SKB_VLAN_PRESENT }
  9722. },
  9723. .fill_helper = bpf_fill_maxinsns6,
  9724. },
  9725. { /* Mainly checking JIT here. */
  9726. "BPF_MAXINSNS: Call heavy transformations",
  9727. { },
  9728. CLASSIC | FLAG_NO_DATA,
  9729. { },
  9730. { { 1, 0 }, { 10, 0 } },
  9731. .fill_helper = bpf_fill_maxinsns7,
  9732. },
  9733. { /* Mainly checking JIT here. */
  9734. "BPF_MAXINSNS: Jump heavy test",
  9735. { },
  9736. CLASSIC | FLAG_NO_DATA,
  9737. { },
  9738. { { 0, 0xffffffff } },
  9739. .fill_helper = bpf_fill_maxinsns8,
  9740. },
  9741. { /* Mainly checking JIT here. */
  9742. "BPF_MAXINSNS: Very long jump backwards",
  9743. { },
  9744. INTERNAL | FLAG_NO_DATA,
  9745. { },
  9746. { { 0, 0xcbababab } },
  9747. .fill_helper = bpf_fill_maxinsns9,
  9748. },
  9749. { /* Mainly checking JIT here. */
  9750. "BPF_MAXINSNS: Edge hopping nuthouse",
  9751. { },
  9752. INTERNAL | FLAG_NO_DATA,
  9753. { },
  9754. { { 0, 0xabababac } },
  9755. .fill_helper = bpf_fill_maxinsns10,
  9756. },
  9757. {
  9758. "BPF_MAXINSNS: Jump, gap, jump, ...",
  9759. { },
  9760. CLASSIC | FLAG_NO_DATA,
  9761. { },
  9762. { { 0, 0xababcbac } },
  9763. .fill_helper = bpf_fill_maxinsns11,
  9764. },
  9765. {
  9766. "BPF_MAXINSNS: jump over MSH",
  9767. { },
  9768. CLASSIC | FLAG_EXPECTED_FAIL,
  9769. { 0xfa, 0xfb, 0xfc, 0xfd, },
  9770. { { 4, 0xabababab } },
  9771. .fill_helper = bpf_fill_maxinsns12,
  9772. .expected_errcode = -EINVAL,
  9773. },
  9774. {
  9775. "BPF_MAXINSNS: exec all MSH",
  9776. { },
  9777. CLASSIC,
  9778. { 0xfa, 0xfb, 0xfc, 0xfd, },
  9779. { { 4, 0xababab83 } },
  9780. .fill_helper = bpf_fill_maxinsns13,
  9781. },
  9782. {
  9783. "BPF_MAXINSNS: ld_abs+get_processor_id",
  9784. { },
  9785. CLASSIC,
  9786. { },
  9787. { { 1, 0xbee } },
  9788. .fill_helper = bpf_fill_ld_abs_get_processor_id,
  9789. },
  9790. /*
  9791. * LD_IND / LD_ABS on fragmented SKBs
  9792. */
  9793. {
  9794. "LD_IND byte frag",
  9795. .u.insns = {
  9796. BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  9797. BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
  9798. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9799. },
  9800. CLASSIC | FLAG_SKB_FRAG,
  9801. { },
  9802. { {0x40, 0x42} },
  9803. .frag_data = {
  9804. 0x42, 0x00, 0x00, 0x00,
  9805. 0x43, 0x44, 0x00, 0x00,
  9806. 0x21, 0x07, 0x19, 0x83,
  9807. },
  9808. },
  9809. {
  9810. "LD_IND halfword frag",
  9811. .u.insns = {
  9812. BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  9813. BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
  9814. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9815. },
  9816. CLASSIC | FLAG_SKB_FRAG,
  9817. { },
  9818. { {0x40, 0x4344} },
  9819. .frag_data = {
  9820. 0x42, 0x00, 0x00, 0x00,
  9821. 0x43, 0x44, 0x00, 0x00,
  9822. 0x21, 0x07, 0x19, 0x83,
  9823. },
  9824. },
  9825. {
  9826. "LD_IND word frag",
  9827. .u.insns = {
  9828. BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  9829. BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
  9830. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9831. },
  9832. CLASSIC | FLAG_SKB_FRAG,
  9833. { },
  9834. { {0x40, 0x21071983} },
  9835. .frag_data = {
  9836. 0x42, 0x00, 0x00, 0x00,
  9837. 0x43, 0x44, 0x00, 0x00,
  9838. 0x21, 0x07, 0x19, 0x83,
  9839. },
  9840. },
  9841. {
  9842. "LD_IND halfword mixed head/frag",
  9843. .u.insns = {
  9844. BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  9845. BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
  9846. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9847. },
  9848. CLASSIC | FLAG_SKB_FRAG,
  9849. { [0x3e] = 0x25, [0x3f] = 0x05, },
  9850. { {0x40, 0x0519} },
  9851. .frag_data = { 0x19, 0x82 },
  9852. },
  9853. {
  9854. "LD_IND word mixed head/frag",
  9855. .u.insns = {
  9856. BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  9857. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
  9858. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9859. },
  9860. CLASSIC | FLAG_SKB_FRAG,
  9861. { [0x3e] = 0x25, [0x3f] = 0x05, },
  9862. { {0x40, 0x25051982} },
  9863. .frag_data = { 0x19, 0x82 },
  9864. },
  9865. {
  9866. "LD_ABS byte frag",
  9867. .u.insns = {
  9868. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
  9869. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9870. },
  9871. CLASSIC | FLAG_SKB_FRAG,
  9872. { },
  9873. { {0x40, 0x42} },
  9874. .frag_data = {
  9875. 0x42, 0x00, 0x00, 0x00,
  9876. 0x43, 0x44, 0x00, 0x00,
  9877. 0x21, 0x07, 0x19, 0x83,
  9878. },
  9879. },
  9880. {
  9881. "LD_ABS halfword frag",
  9882. .u.insns = {
  9883. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
  9884. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9885. },
  9886. CLASSIC | FLAG_SKB_FRAG,
  9887. { },
  9888. { {0x40, 0x4344} },
  9889. .frag_data = {
  9890. 0x42, 0x00, 0x00, 0x00,
  9891. 0x43, 0x44, 0x00, 0x00,
  9892. 0x21, 0x07, 0x19, 0x83,
  9893. },
  9894. },
  9895. {
  9896. "LD_ABS word frag",
  9897. .u.insns = {
  9898. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
  9899. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9900. },
  9901. CLASSIC | FLAG_SKB_FRAG,
  9902. { },
  9903. { {0x40, 0x21071983} },
  9904. .frag_data = {
  9905. 0x42, 0x00, 0x00, 0x00,
  9906. 0x43, 0x44, 0x00, 0x00,
  9907. 0x21, 0x07, 0x19, 0x83,
  9908. },
  9909. },
  9910. {
  9911. "LD_ABS halfword mixed head/frag",
  9912. .u.insns = {
  9913. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
  9914. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9915. },
  9916. CLASSIC | FLAG_SKB_FRAG,
  9917. { [0x3e] = 0x25, [0x3f] = 0x05, },
  9918. { {0x40, 0x0519} },
  9919. .frag_data = { 0x19, 0x82 },
  9920. },
  9921. {
  9922. "LD_ABS word mixed head/frag",
  9923. .u.insns = {
  9924. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
  9925. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9926. },
  9927. CLASSIC | FLAG_SKB_FRAG,
  9928. { [0x3e] = 0x25, [0x3f] = 0x05, },
  9929. { {0x40, 0x25051982} },
  9930. .frag_data = { 0x19, 0x82 },
  9931. },
  9932. /*
  9933. * LD_IND / LD_ABS on non fragmented SKBs
  9934. */
  9935. {
  9936. /*
  9937. * this tests that the JIT/interpreter correctly resets X
  9938. * before using it in an LD_IND instruction.
  9939. */
  9940. "LD_IND byte default X",
  9941. .u.insns = {
  9942. BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  9943. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9944. },
  9945. CLASSIC,
  9946. { [0x1] = 0x42 },
  9947. { {0x40, 0x42 } },
  9948. },
  9949. {
  9950. "LD_IND byte positive offset",
  9951. .u.insns = {
  9952. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  9953. BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  9954. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9955. },
  9956. CLASSIC,
  9957. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  9958. { {0x40, 0x82 } },
  9959. },
  9960. {
  9961. "LD_IND byte negative offset",
  9962. .u.insns = {
  9963. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  9964. BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
  9965. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9966. },
  9967. CLASSIC,
  9968. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  9969. { {0x40, 0x05 } },
  9970. },
  9971. {
  9972. "LD_IND byte positive offset, all ff",
  9973. .u.insns = {
  9974. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  9975. BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  9976. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9977. },
  9978. CLASSIC,
  9979. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  9980. { {0x40, 0xff } },
  9981. },
  9982. {
  9983. "LD_IND byte positive offset, out of bounds",
  9984. .u.insns = {
  9985. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  9986. BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  9987. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9988. },
  9989. CLASSIC,
  9990. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  9991. { {0x3f, 0 }, },
  9992. },
  9993. {
  9994. "LD_IND byte negative offset, out of bounds",
  9995. .u.insns = {
  9996. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  9997. BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
  9998. BPF_STMT(BPF_RET | BPF_A, 0x0),
  9999. },
  10000. CLASSIC,
  10001. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10002. { {0x3f, 0 } },
  10003. },
  10004. {
  10005. "LD_IND byte negative offset, multiple calls",
  10006. .u.insns = {
  10007. BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
  10008. BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
  10009. BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
  10010. BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
  10011. BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
  10012. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10013. },
  10014. CLASSIC,
  10015. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10016. { {0x40, 0x82 }, },
  10017. },
  10018. {
  10019. "LD_IND halfword positive offset",
  10020. .u.insns = {
  10021. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10022. BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
  10023. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10024. },
  10025. CLASSIC,
  10026. {
  10027. [0x1c] = 0xaa, [0x1d] = 0x55,
  10028. [0x1e] = 0xbb, [0x1f] = 0x66,
  10029. [0x20] = 0xcc, [0x21] = 0x77,
  10030. [0x22] = 0xdd, [0x23] = 0x88,
  10031. },
  10032. { {0x40, 0xdd88 } },
  10033. },
  10034. {
  10035. "LD_IND halfword negative offset",
  10036. .u.insns = {
  10037. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10038. BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
  10039. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10040. },
  10041. CLASSIC,
  10042. {
  10043. [0x1c] = 0xaa, [0x1d] = 0x55,
  10044. [0x1e] = 0xbb, [0x1f] = 0x66,
  10045. [0x20] = 0xcc, [0x21] = 0x77,
  10046. [0x22] = 0xdd, [0x23] = 0x88,
  10047. },
  10048. { {0x40, 0xbb66 } },
  10049. },
  10050. {
  10051. "LD_IND halfword unaligned",
  10052. .u.insns = {
  10053. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10054. BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
  10055. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10056. },
  10057. CLASSIC,
  10058. {
  10059. [0x1c] = 0xaa, [0x1d] = 0x55,
  10060. [0x1e] = 0xbb, [0x1f] = 0x66,
  10061. [0x20] = 0xcc, [0x21] = 0x77,
  10062. [0x22] = 0xdd, [0x23] = 0x88,
  10063. },
  10064. { {0x40, 0x66cc } },
  10065. },
  10066. {
  10067. "LD_IND halfword positive offset, all ff",
  10068. .u.insns = {
  10069. BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
  10070. BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
  10071. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10072. },
  10073. CLASSIC,
  10074. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  10075. { {0x40, 0xffff } },
  10076. },
  10077. {
  10078. "LD_IND halfword positive offset, out of bounds",
  10079. .u.insns = {
  10080. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10081. BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
  10082. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10083. },
  10084. CLASSIC,
  10085. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10086. { {0x3f, 0 }, },
  10087. },
  10088. {
  10089. "LD_IND halfword negative offset, out of bounds",
  10090. .u.insns = {
  10091. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10092. BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
  10093. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10094. },
  10095. CLASSIC,
  10096. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10097. { {0x3f, 0 } },
  10098. },
  10099. {
  10100. "LD_IND word positive offset",
  10101. .u.insns = {
  10102. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10103. BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
  10104. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10105. },
  10106. CLASSIC,
  10107. {
  10108. [0x1c] = 0xaa, [0x1d] = 0x55,
  10109. [0x1e] = 0xbb, [0x1f] = 0x66,
  10110. [0x20] = 0xcc, [0x21] = 0x77,
  10111. [0x22] = 0xdd, [0x23] = 0x88,
  10112. [0x24] = 0xee, [0x25] = 0x99,
  10113. [0x26] = 0xff, [0x27] = 0xaa,
  10114. },
  10115. { {0x40, 0xee99ffaa } },
  10116. },
  10117. {
  10118. "LD_IND word negative offset",
  10119. .u.insns = {
  10120. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10121. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
  10122. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10123. },
  10124. CLASSIC,
  10125. {
  10126. [0x1c] = 0xaa, [0x1d] = 0x55,
  10127. [0x1e] = 0xbb, [0x1f] = 0x66,
  10128. [0x20] = 0xcc, [0x21] = 0x77,
  10129. [0x22] = 0xdd, [0x23] = 0x88,
  10130. [0x24] = 0xee, [0x25] = 0x99,
  10131. [0x26] = 0xff, [0x27] = 0xaa,
  10132. },
  10133. { {0x40, 0xaa55bb66 } },
  10134. },
  10135. {
  10136. "LD_IND word unaligned (addr & 3 == 2)",
  10137. .u.insns = {
  10138. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10139. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
  10140. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10141. },
  10142. CLASSIC,
  10143. {
  10144. [0x1c] = 0xaa, [0x1d] = 0x55,
  10145. [0x1e] = 0xbb, [0x1f] = 0x66,
  10146. [0x20] = 0xcc, [0x21] = 0x77,
  10147. [0x22] = 0xdd, [0x23] = 0x88,
  10148. [0x24] = 0xee, [0x25] = 0x99,
  10149. [0x26] = 0xff, [0x27] = 0xaa,
  10150. },
  10151. { {0x40, 0xbb66cc77 } },
  10152. },
  10153. {
  10154. "LD_IND word unaligned (addr & 3 == 1)",
  10155. .u.insns = {
  10156. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10157. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
  10158. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10159. },
  10160. CLASSIC,
  10161. {
  10162. [0x1c] = 0xaa, [0x1d] = 0x55,
  10163. [0x1e] = 0xbb, [0x1f] = 0x66,
  10164. [0x20] = 0xcc, [0x21] = 0x77,
  10165. [0x22] = 0xdd, [0x23] = 0x88,
  10166. [0x24] = 0xee, [0x25] = 0x99,
  10167. [0x26] = 0xff, [0x27] = 0xaa,
  10168. },
  10169. { {0x40, 0x55bb66cc } },
  10170. },
  10171. {
  10172. "LD_IND word unaligned (addr & 3 == 3)",
  10173. .u.insns = {
  10174. BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10175. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
  10176. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10177. },
  10178. CLASSIC,
  10179. {
  10180. [0x1c] = 0xaa, [0x1d] = 0x55,
  10181. [0x1e] = 0xbb, [0x1f] = 0x66,
  10182. [0x20] = 0xcc, [0x21] = 0x77,
  10183. [0x22] = 0xdd, [0x23] = 0x88,
  10184. [0x24] = 0xee, [0x25] = 0x99,
  10185. [0x26] = 0xff, [0x27] = 0xaa,
  10186. },
  10187. { {0x40, 0x66cc77dd } },
  10188. },
  10189. {
  10190. "LD_IND word positive offset, all ff",
  10191. .u.insns = {
  10192. BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
  10193. BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
  10194. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10195. },
  10196. CLASSIC,
  10197. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  10198. { {0x40, 0xffffffff } },
  10199. },
  10200. {
  10201. "LD_IND word positive offset, out of bounds",
  10202. .u.insns = {
  10203. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10204. BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
  10205. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10206. },
  10207. CLASSIC,
  10208. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10209. { {0x3f, 0 }, },
  10210. },
  10211. {
  10212. "LD_IND word negative offset, out of bounds",
  10213. .u.insns = {
  10214. BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10215. BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
  10216. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10217. },
  10218. CLASSIC,
  10219. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10220. { {0x3f, 0 } },
  10221. },
  10222. {
  10223. "LD_ABS byte",
  10224. .u.insns = {
  10225. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
  10226. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10227. },
  10228. CLASSIC,
  10229. {
  10230. [0x1c] = 0xaa, [0x1d] = 0x55,
  10231. [0x1e] = 0xbb, [0x1f] = 0x66,
  10232. [0x20] = 0xcc, [0x21] = 0x77,
  10233. [0x22] = 0xdd, [0x23] = 0x88,
  10234. [0x24] = 0xee, [0x25] = 0x99,
  10235. [0x26] = 0xff, [0x27] = 0xaa,
  10236. },
  10237. { {0x40, 0xcc } },
  10238. },
  10239. {
  10240. "LD_ABS byte positive offset, all ff",
  10241. .u.insns = {
  10242. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
  10243. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10244. },
  10245. CLASSIC,
  10246. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  10247. { {0x40, 0xff } },
  10248. },
  10249. {
  10250. "LD_ABS byte positive offset, out of bounds",
  10251. .u.insns = {
  10252. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
  10253. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10254. },
  10255. CLASSIC,
  10256. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10257. { {0x3f, 0 }, },
  10258. },
  10259. {
  10260. "LD_ABS byte negative offset, out of bounds load",
  10261. .u.insns = {
  10262. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
  10263. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10264. },
  10265. CLASSIC | FLAG_EXPECTED_FAIL,
  10266. .expected_errcode = -EINVAL,
  10267. },
  10268. {
  10269. "LD_ABS byte negative offset, in bounds",
  10270. .u.insns = {
  10271. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10272. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10273. },
  10274. CLASSIC,
  10275. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10276. { {0x40, 0x82 }, },
  10277. },
  10278. {
  10279. "LD_ABS byte negative offset, out of bounds",
  10280. .u.insns = {
  10281. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10282. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10283. },
  10284. CLASSIC,
  10285. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10286. { {0x3f, 0 }, },
  10287. },
  10288. {
  10289. "LD_ABS byte negative offset, multiple calls",
  10290. .u.insns = {
  10291. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
  10292. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
  10293. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
  10294. BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10295. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10296. },
  10297. CLASSIC,
  10298. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10299. { {0x40, 0x82 }, },
  10300. },
  10301. {
  10302. "LD_ABS halfword",
  10303. .u.insns = {
  10304. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
  10305. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10306. },
  10307. CLASSIC,
  10308. {
  10309. [0x1c] = 0xaa, [0x1d] = 0x55,
  10310. [0x1e] = 0xbb, [0x1f] = 0x66,
  10311. [0x20] = 0xcc, [0x21] = 0x77,
  10312. [0x22] = 0xdd, [0x23] = 0x88,
  10313. [0x24] = 0xee, [0x25] = 0x99,
  10314. [0x26] = 0xff, [0x27] = 0xaa,
  10315. },
  10316. { {0x40, 0xdd88 } },
  10317. },
  10318. {
  10319. "LD_ABS halfword unaligned",
  10320. .u.insns = {
  10321. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
  10322. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10323. },
  10324. CLASSIC,
  10325. {
  10326. [0x1c] = 0xaa, [0x1d] = 0x55,
  10327. [0x1e] = 0xbb, [0x1f] = 0x66,
  10328. [0x20] = 0xcc, [0x21] = 0x77,
  10329. [0x22] = 0xdd, [0x23] = 0x88,
  10330. [0x24] = 0xee, [0x25] = 0x99,
  10331. [0x26] = 0xff, [0x27] = 0xaa,
  10332. },
  10333. { {0x40, 0x99ff } },
  10334. },
  10335. {
  10336. "LD_ABS halfword positive offset, all ff",
  10337. .u.insns = {
  10338. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
  10339. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10340. },
  10341. CLASSIC,
  10342. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  10343. { {0x40, 0xffff } },
  10344. },
  10345. {
  10346. "LD_ABS halfword positive offset, out of bounds",
  10347. .u.insns = {
  10348. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
  10349. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10350. },
  10351. CLASSIC,
  10352. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10353. { {0x3f, 0 }, },
  10354. },
  10355. {
  10356. "LD_ABS halfword negative offset, out of bounds load",
  10357. .u.insns = {
  10358. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
  10359. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10360. },
  10361. CLASSIC | FLAG_EXPECTED_FAIL,
  10362. .expected_errcode = -EINVAL,
  10363. },
  10364. {
  10365. "LD_ABS halfword negative offset, in bounds",
  10366. .u.insns = {
  10367. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
  10368. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10369. },
  10370. CLASSIC,
  10371. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10372. { {0x40, 0x1982 }, },
  10373. },
  10374. {
  10375. "LD_ABS halfword negative offset, out of bounds",
  10376. .u.insns = {
  10377. BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
  10378. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10379. },
  10380. CLASSIC,
  10381. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10382. { {0x3f, 0 }, },
  10383. },
  10384. {
  10385. "LD_ABS word",
  10386. .u.insns = {
  10387. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
  10388. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10389. },
  10390. CLASSIC,
  10391. {
  10392. [0x1c] = 0xaa, [0x1d] = 0x55,
  10393. [0x1e] = 0xbb, [0x1f] = 0x66,
  10394. [0x20] = 0xcc, [0x21] = 0x77,
  10395. [0x22] = 0xdd, [0x23] = 0x88,
  10396. [0x24] = 0xee, [0x25] = 0x99,
  10397. [0x26] = 0xff, [0x27] = 0xaa,
  10398. },
  10399. { {0x40, 0xaa55bb66 } },
  10400. },
  10401. {
  10402. "LD_ABS word unaligned (addr & 3 == 2)",
  10403. .u.insns = {
  10404. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
  10405. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10406. },
  10407. CLASSIC,
  10408. {
  10409. [0x1c] = 0xaa, [0x1d] = 0x55,
  10410. [0x1e] = 0xbb, [0x1f] = 0x66,
  10411. [0x20] = 0xcc, [0x21] = 0x77,
  10412. [0x22] = 0xdd, [0x23] = 0x88,
  10413. [0x24] = 0xee, [0x25] = 0x99,
  10414. [0x26] = 0xff, [0x27] = 0xaa,
  10415. },
  10416. { {0x40, 0xdd88ee99 } },
  10417. },
  10418. {
  10419. "LD_ABS word unaligned (addr & 3 == 1)",
  10420. .u.insns = {
  10421. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
  10422. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10423. },
  10424. CLASSIC,
  10425. {
  10426. [0x1c] = 0xaa, [0x1d] = 0x55,
  10427. [0x1e] = 0xbb, [0x1f] = 0x66,
  10428. [0x20] = 0xcc, [0x21] = 0x77,
  10429. [0x22] = 0xdd, [0x23] = 0x88,
  10430. [0x24] = 0xee, [0x25] = 0x99,
  10431. [0x26] = 0xff, [0x27] = 0xaa,
  10432. },
  10433. { {0x40, 0x77dd88ee } },
  10434. },
  10435. {
  10436. "LD_ABS word unaligned (addr & 3 == 3)",
  10437. .u.insns = {
  10438. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
  10439. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10440. },
  10441. CLASSIC,
  10442. {
  10443. [0x1c] = 0xaa, [0x1d] = 0x55,
  10444. [0x1e] = 0xbb, [0x1f] = 0x66,
  10445. [0x20] = 0xcc, [0x21] = 0x77,
  10446. [0x22] = 0xdd, [0x23] = 0x88,
  10447. [0x24] = 0xee, [0x25] = 0x99,
  10448. [0x26] = 0xff, [0x27] = 0xaa,
  10449. },
  10450. { {0x40, 0x88ee99ff } },
  10451. },
  10452. {
  10453. "LD_ABS word positive offset, all ff",
  10454. .u.insns = {
  10455. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
  10456. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10457. },
  10458. CLASSIC,
  10459. { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
  10460. { {0x40, 0xffffffff } },
  10461. },
  10462. {
  10463. "LD_ABS word positive offset, out of bounds",
  10464. .u.insns = {
  10465. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
  10466. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10467. },
  10468. CLASSIC,
  10469. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10470. { {0x3f, 0 }, },
  10471. },
  10472. {
  10473. "LD_ABS word negative offset, out of bounds load",
  10474. .u.insns = {
  10475. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
  10476. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10477. },
  10478. CLASSIC | FLAG_EXPECTED_FAIL,
  10479. .expected_errcode = -EINVAL,
  10480. },
  10481. {
  10482. "LD_ABS word negative offset, in bounds",
  10483. .u.insns = {
  10484. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
  10485. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10486. },
  10487. CLASSIC,
  10488. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10489. { {0x40, 0x25051982 }, },
  10490. },
  10491. {
  10492. "LD_ABS word negative offset, out of bounds",
  10493. .u.insns = {
  10494. BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
  10495. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10496. },
  10497. CLASSIC,
  10498. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10499. { {0x3f, 0 }, },
  10500. },
  10501. {
  10502. "LDX_MSH standalone, preserved A",
  10503. .u.insns = {
  10504. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10505. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  10506. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10507. },
  10508. CLASSIC,
  10509. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10510. { {0x40, 0xffeebbaa }, },
  10511. },
  10512. {
  10513. "LDX_MSH standalone, preserved A 2",
  10514. .u.insns = {
  10515. BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
  10516. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  10517. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
  10518. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
  10519. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
  10520. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10521. },
  10522. CLASSIC,
  10523. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10524. { {0x40, 0x175e9d63 }, },
  10525. },
  10526. {
  10527. "LDX_MSH standalone, test result 1",
  10528. .u.insns = {
  10529. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10530. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  10531. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  10532. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10533. },
  10534. CLASSIC,
  10535. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10536. { {0x40, 0x14 }, },
  10537. },
  10538. {
  10539. "LDX_MSH standalone, test result 2",
  10540. .u.insns = {
  10541. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10542. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
  10543. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  10544. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10545. },
  10546. CLASSIC,
  10547. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10548. { {0x40, 0x24 }, },
  10549. },
  10550. {
  10551. "LDX_MSH standalone, negative offset",
  10552. .u.insns = {
  10553. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10554. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
  10555. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  10556. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10557. },
  10558. CLASSIC,
  10559. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10560. { {0x40, 0 }, },
  10561. },
  10562. {
  10563. "LDX_MSH standalone, negative offset 2",
  10564. .u.insns = {
  10565. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10566. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
  10567. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  10568. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10569. },
  10570. CLASSIC,
  10571. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10572. { {0x40, 0x24 }, },
  10573. },
  10574. {
  10575. "LDX_MSH standalone, out of bounds",
  10576. .u.insns = {
  10577. BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  10578. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
  10579. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  10580. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10581. },
  10582. CLASSIC,
  10583. { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
  10584. { {0x40, 0 }, },
  10585. },
  10586. /*
  10587. * verify that the interpreter or JIT correctly sets A and X
  10588. * to 0.
  10589. */
  10590. {
  10591. "ADD default X",
  10592. .u.insns = {
  10593. /*
  10594. * A = 0x42
  10595. * A = A + X
  10596. * ret A
  10597. */
  10598. BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  10599. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  10600. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10601. },
  10602. CLASSIC | FLAG_NO_DATA,
  10603. {},
  10604. { {0x1, 0x42 } },
  10605. },
  10606. {
  10607. "ADD default A",
  10608. .u.insns = {
  10609. /*
  10610. * A = A + 0x42
  10611. * ret A
  10612. */
  10613. BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
  10614. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10615. },
  10616. CLASSIC | FLAG_NO_DATA,
  10617. {},
  10618. { {0x1, 0x42 } },
  10619. },
  10620. {
  10621. "SUB default X",
  10622. .u.insns = {
  10623. /*
  10624. * A = 0x66
  10625. * A = A - X
  10626. * ret A
  10627. */
  10628. BPF_STMT(BPF_LD | BPF_IMM, 0x66),
  10629. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  10630. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10631. },
  10632. CLASSIC | FLAG_NO_DATA,
  10633. {},
  10634. { {0x1, 0x66 } },
  10635. },
  10636. {
  10637. "SUB default A",
  10638. .u.insns = {
  10639. /*
  10640. * A = A - -0x66
  10641. * ret A
  10642. */
  10643. BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
  10644. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10645. },
  10646. CLASSIC | FLAG_NO_DATA,
  10647. {},
  10648. { {0x1, 0x66 } },
  10649. },
  10650. {
  10651. "MUL default X",
  10652. .u.insns = {
  10653. /*
  10654. * A = 0x42
  10655. * A = A * X
  10656. * ret A
  10657. */
  10658. BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  10659. BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
  10660. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10661. },
  10662. CLASSIC | FLAG_NO_DATA,
  10663. {},
  10664. { {0x1, 0x0 } },
  10665. },
  10666. {
  10667. "MUL default A",
  10668. .u.insns = {
  10669. /*
  10670. * A = A * 0x66
  10671. * ret A
  10672. */
  10673. BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
  10674. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10675. },
  10676. CLASSIC | FLAG_NO_DATA,
  10677. {},
  10678. { {0x1, 0x0 } },
  10679. },
  10680. {
  10681. "DIV default X",
  10682. .u.insns = {
  10683. /*
  10684. * A = 0x42
  10685. * A = A / X ; this halts the filter execution if X is 0
  10686. * ret 0x42
  10687. */
  10688. BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  10689. BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
  10690. BPF_STMT(BPF_RET | BPF_K, 0x42),
  10691. },
  10692. CLASSIC | FLAG_NO_DATA,
  10693. {},
  10694. { {0x1, 0x0 } },
  10695. },
  10696. {
  10697. "DIV default A",
  10698. .u.insns = {
  10699. /*
  10700. * A = A / 1
  10701. * ret A
  10702. */
  10703. BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
  10704. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10705. },
  10706. CLASSIC | FLAG_NO_DATA,
  10707. {},
  10708. { {0x1, 0x0 } },
  10709. },
  10710. {
  10711. "MOD default X",
  10712. .u.insns = {
  10713. /*
  10714. * A = 0x42
  10715. * A = A mod X ; this halts the filter execution if X is 0
  10716. * ret 0x42
  10717. */
  10718. BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  10719. BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
  10720. BPF_STMT(BPF_RET | BPF_K, 0x42),
  10721. },
  10722. CLASSIC | FLAG_NO_DATA,
  10723. {},
  10724. { {0x1, 0x0 } },
  10725. },
  10726. {
  10727. "MOD default A",
  10728. .u.insns = {
  10729. /*
  10730. * A = A mod 1
  10731. * ret A
  10732. */
  10733. BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
  10734. BPF_STMT(BPF_RET | BPF_A, 0x0),
  10735. },
  10736. CLASSIC | FLAG_NO_DATA,
  10737. {},
  10738. { {0x1, 0x0 } },
  10739. },
  10740. {
  10741. "JMP EQ default A",
  10742. .u.insns = {
  10743. /*
  10744. * cmp A, 0x0, 0, 1
  10745. * ret 0x42
  10746. * ret 0x66
  10747. */
  10748. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
  10749. BPF_STMT(BPF_RET | BPF_K, 0x42),
  10750. BPF_STMT(BPF_RET | BPF_K, 0x66),
  10751. },
  10752. CLASSIC | FLAG_NO_DATA,
  10753. {},
  10754. { {0x1, 0x42 } },
  10755. },
  10756. {
  10757. "JMP EQ default X",
  10758. .u.insns = {
  10759. /*
  10760. * A = 0x0
  10761. * cmp A, X, 0, 1
  10762. * ret 0x42
  10763. * ret 0x66
  10764. */
  10765. BPF_STMT(BPF_LD | BPF_IMM, 0x0),
  10766. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
  10767. BPF_STMT(BPF_RET | BPF_K, 0x42),
  10768. BPF_STMT(BPF_RET | BPF_K, 0x66),
  10769. },
  10770. CLASSIC | FLAG_NO_DATA,
  10771. {},
  10772. { {0x1, 0x42 } },
  10773. },
  10774. /* Checking interpreter vs JIT wrt signed extended imms. */
  10775. {
  10776. "JNE signed compare, test 1",
  10777. .u.insns_int = {
  10778. BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  10779. BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  10780. BPF_MOV64_REG(R2, R1),
  10781. BPF_ALU64_REG(BPF_AND, R2, R3),
  10782. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10783. BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
  10784. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10785. BPF_EXIT_INSN(),
  10786. },
  10787. INTERNAL,
  10788. { },
  10789. { { 0, 1 } },
  10790. },
  10791. {
  10792. "JNE signed compare, test 2",
  10793. .u.insns_int = {
  10794. BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  10795. BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  10796. BPF_MOV64_REG(R2, R1),
  10797. BPF_ALU64_REG(BPF_AND, R2, R3),
  10798. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10799. BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
  10800. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10801. BPF_EXIT_INSN(),
  10802. },
  10803. INTERNAL,
  10804. { },
  10805. { { 0, 1 } },
  10806. },
  10807. {
  10808. "JNE signed compare, test 3",
  10809. .u.insns_int = {
  10810. BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  10811. BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  10812. BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
  10813. BPF_MOV64_REG(R2, R1),
  10814. BPF_ALU64_REG(BPF_AND, R2, R3),
  10815. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10816. BPF_JMP_REG(BPF_JNE, R2, R4, 1),
  10817. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10818. BPF_EXIT_INSN(),
  10819. },
  10820. INTERNAL,
  10821. { },
  10822. { { 0, 2 } },
  10823. },
  10824. {
  10825. "JNE signed compare, test 4",
  10826. .u.insns_int = {
  10827. BPF_LD_IMM64(R1, -17104896),
  10828. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10829. BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
  10830. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10831. BPF_EXIT_INSN(),
  10832. },
  10833. INTERNAL,
  10834. { },
  10835. { { 0, 2 } },
  10836. },
  10837. {
  10838. "JNE signed compare, test 5",
  10839. .u.insns_int = {
  10840. BPF_LD_IMM64(R1, 0xfefb0000),
  10841. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10842. BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
  10843. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10844. BPF_EXIT_INSN(),
  10845. },
  10846. INTERNAL,
  10847. { },
  10848. { { 0, 1 } },
  10849. },
  10850. {
  10851. "JNE signed compare, test 6",
  10852. .u.insns_int = {
  10853. BPF_LD_IMM64(R1, 0x7efb0000),
  10854. BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10855. BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
  10856. BPF_ALU32_IMM(BPF_MOV, R0, 2),
  10857. BPF_EXIT_INSN(),
  10858. },
  10859. INTERNAL,
  10860. { },
  10861. { { 0, 2 } },
  10862. },
  10863. {
  10864. "JNE signed compare, test 7",
  10865. .u.insns = {
  10866. BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
  10867. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  10868. BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
  10869. BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
  10870. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
  10871. BPF_STMT(BPF_RET | BPF_K, 1),
  10872. BPF_STMT(BPF_RET | BPF_K, 2),
  10873. },
  10874. CLASSIC | FLAG_NO_DATA,
  10875. {},
  10876. { { 0, 2 } },
  10877. },
  10878. /* BPF_LDX_MEM with operand aliasing */
  10879. {
  10880. "LDX_MEM_B: operand register aliasing",
  10881. .u.insns_int = {
  10882. BPF_ST_MEM(BPF_B, R10, -8, 123),
  10883. BPF_MOV64_REG(R0, R10),
  10884. BPF_LDX_MEM(BPF_B, R0, R0, -8),
  10885. BPF_EXIT_INSN(),
  10886. },
  10887. INTERNAL,
  10888. { },
  10889. { { 0, 123 } },
  10890. .stack_depth = 8,
  10891. },
  10892. {
  10893. "LDX_MEM_H: operand register aliasing",
  10894. .u.insns_int = {
  10895. BPF_ST_MEM(BPF_H, R10, -8, 12345),
  10896. BPF_MOV64_REG(R0, R10),
  10897. BPF_LDX_MEM(BPF_H, R0, R0, -8),
  10898. BPF_EXIT_INSN(),
  10899. },
  10900. INTERNAL,
  10901. { },
  10902. { { 0, 12345 } },
  10903. .stack_depth = 8,
  10904. },
  10905. {
  10906. "LDX_MEM_W: operand register aliasing",
  10907. .u.insns_int = {
  10908. BPF_ST_MEM(BPF_W, R10, -8, 123456789),
  10909. BPF_MOV64_REG(R0, R10),
  10910. BPF_LDX_MEM(BPF_W, R0, R0, -8),
  10911. BPF_EXIT_INSN(),
  10912. },
  10913. INTERNAL,
  10914. { },
  10915. { { 0, 123456789 } },
  10916. .stack_depth = 8,
  10917. },
  10918. {
  10919. "LDX_MEM_DW: operand register aliasing",
  10920. .u.insns_int = {
  10921. BPF_LD_IMM64(R1, 0x123456789abcdefULL),
  10922. BPF_STX_MEM(BPF_DW, R10, R1, -8),
  10923. BPF_MOV64_REG(R0, R10),
  10924. BPF_LDX_MEM(BPF_DW, R0, R0, -8),
  10925. BPF_ALU64_REG(BPF_SUB, R0, R1),
  10926. BPF_MOV64_REG(R1, R0),
  10927. BPF_ALU64_IMM(BPF_RSH, R1, 32),
  10928. BPF_ALU64_REG(BPF_OR, R0, R1),
  10929. BPF_EXIT_INSN(),
  10930. },
  10931. INTERNAL,
  10932. { },
  10933. { { 0, 0 } },
  10934. .stack_depth = 8,
  10935. },
  10936. /*
  10937. * Register (non-)clobbering tests for the case where a JIT implements
  10938. * complex ALU or ATOMIC operations via function calls. If so, the
  10939. * function call must be transparent to the eBPF registers. The JIT
  10940. * must therefore save and restore relevant registers across the call.
  10941. * The following tests check that the eBPF registers retain their
  10942. * values after such an operation. Mainly intended for complex ALU
  10943. * and atomic operation, but we run it for all. You never know...
  10944. *
  10945. * Note that each operation should be tested twice with different
  10946. * destinations, to check preservation for all registers.
  10947. */
  /*
   * BPF_TEST_CLOBBER_ALU(alu, op, dst, src) - expand to one test-case
   * initializer named "<alu>_<op> to <dst>: no clobbering".
   *
   * Each Rn is first loaded with a known value (the Rn constant itself,
   * used as the immediate of an ALU64 MOV), the ALU operation under test
   * is applied to @dst (followed by a 32-bit MOV of @dst, exercising the
   * ALU32 path as well), and afterwards every register is compared via
   * JNE against the same Rn constant. The program returns 1 only if no
   * register lost its value across the operation — i.e. a JIT that
   * implements the op via a helper call saved/restored correctly.
   * No comments may appear inside the macro body: a bare comment line
   * would break the backslash continuation.
   */
  10948. #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src) \
  10949. { \
  10950. #alu "_" #op " to " #dst ": no clobbering", \
  10951. .u.insns_int = { \
  10952. BPF_ALU64_IMM(BPF_MOV, R0, R0), \
  10953. BPF_ALU64_IMM(BPF_MOV, R1, R1), \
  10954. BPF_ALU64_IMM(BPF_MOV, R2, R2), \
  10955. BPF_ALU64_IMM(BPF_MOV, R3, R3), \
  10956. BPF_ALU64_IMM(BPF_MOV, R4, R4), \
  10957. BPF_ALU64_IMM(BPF_MOV, R5, R5), \
  10958. BPF_ALU64_IMM(BPF_MOV, R6, R6), \
  10959. BPF_ALU64_IMM(BPF_MOV, R7, R7), \
  10960. BPF_ALU64_IMM(BPF_MOV, R8, R8), \
  10961. BPF_ALU64_IMM(BPF_MOV, R9, R9), \
  10962. BPF_##alu(BPF_ ##op, dst, src), \
  10963. BPF_ALU32_IMM(BPF_MOV, dst, dst), \
  10964. BPF_JMP_IMM(BPF_JNE, R0, R0, 10), \
  10965. BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
  10966. BPF_JMP_IMM(BPF_JNE, R2, R2, 8), \
  10967. BPF_JMP_IMM(BPF_JNE, R3, R3, 7), \
  10968. BPF_JMP_IMM(BPF_JNE, R4, R4, 6), \
  10969. BPF_JMP_IMM(BPF_JNE, R5, R5, 5), \
  10970. BPF_JMP_IMM(BPF_JNE, R6, R6, 4), \
  10971. BPF_JMP_IMM(BPF_JNE, R7, R7, 3), \
  10972. BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
  10973. BPF_JMP_IMM(BPF_JNE, R9, R9, 1), \
  10974. BPF_ALU64_IMM(BPF_MOV, R0, 1), \
  10975. BPF_EXIT_INSN(), \
  10976. }, \
  10977. INTERNAL, \
  10978. { }, \
  10979. { { 0, 1 } } \
  10980. }
  10981. /* ALU64 operations, register clobbering */
  10982. BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
  10983. BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
  10984. BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
  10985. BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
  10986. BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
  10987. BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
  10988. BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
  10989. BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
  10990. BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
  10991. BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
  10992. BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
  10993. BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
  10994. BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
  10995. BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
  10996. BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
  10997. BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
  10998. BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
  10999. BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
  11000. BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
  11001. BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
  11002. BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
  11003. BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
  11004. /* ALU32 immediate operations, register clobbering */
  11005. BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
  11006. BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
  11007. BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
  11008. BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
  11009. BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
  11010. BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
  11011. BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
  11012. BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
  11013. BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
  11014. BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
  11015. BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
  11016. BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
  11017. BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
  11018. BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
  11019. BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
  11020. BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
  11021. BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
  11022. BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
  11023. BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
  11024. BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
  11025. BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
  11026. BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
  11027. /* ALU64 register operations, register clobbering */
  11028. BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
  11029. BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
  11030. BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
  11031. BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
  11032. BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
  11033. BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
  11034. BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
  11035. BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
  11036. BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
  11037. BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
  11038. BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
  11039. BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
  11040. BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
  11041. BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
  11042. BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
  11043. BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
  11044. BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
  11045. BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
  11046. BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
  11047. BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
  11048. BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
  11049. BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
  11050. /* ALU32 register operations, register clobbering */
  11051. BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
  11052. BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
  11053. BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
  11054. BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
  11055. BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
  11056. BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
  11057. BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
  11058. BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
  11059. BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
  11060. BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
  11061. BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
  11062. BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
  11063. BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
  11064. BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
  11065. BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
  11066. BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
  11067. BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
  11068. BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
  11069. BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
  11070. BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
  11071. BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
  11072. BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
  11073. #undef BPF_TEST_CLOBBER_ALU
  /*
   * BPF_TEST_CLOBBER_ATOMIC(width, op) - expand to one test case named
   * "Atomic_<width> <op>: no clobbering".
   *
   * R0..R9 are loaded with the constants 0..9, then the stack slot at
   * R10-8 is seeded so the post-op register checks still pass: 0 for
   * BPF_CMPXCHG (whose result lands in R0, expected to stay 0), 1 for
   * BPF_FETCH variants (whose fetched old value lands in R1, expected to
   * stay 1), 0 otherwise. After the atomic operation every register is
   * compared against its original constant; the program returns 1 only
   * if nothing was clobbered. No comments may appear inside the macro
   * body: a bare comment line would break the backslash continuation.
   */
  11074. #define BPF_TEST_CLOBBER_ATOMIC(width, op) \
  11075. { \
  11076. "Atomic_" #width " " #op ": no clobbering", \
  11077. .u.insns_int = { \
  11078. BPF_ALU64_IMM(BPF_MOV, R0, 0), \
  11079. BPF_ALU64_IMM(BPF_MOV, R1, 1), \
  11080. BPF_ALU64_IMM(BPF_MOV, R2, 2), \
  11081. BPF_ALU64_IMM(BPF_MOV, R3, 3), \
  11082. BPF_ALU64_IMM(BPF_MOV, R4, 4), \
  11083. BPF_ALU64_IMM(BPF_MOV, R5, 5), \
  11084. BPF_ALU64_IMM(BPF_MOV, R6, 6), \
  11085. BPF_ALU64_IMM(BPF_MOV, R7, 7), \
  11086. BPF_ALU64_IMM(BPF_MOV, R8, 8), \
  11087. BPF_ALU64_IMM(BPF_MOV, R9, 9), \
  11088. BPF_ST_MEM(width, R10, -8, \
  11089. (op) == BPF_CMPXCHG ? 0 : \
  11090. (op) & BPF_FETCH ? 1 : 0), \
  11091. BPF_ATOMIC_OP(width, op, R10, R1, -8), \
  11092. BPF_JMP_IMM(BPF_JNE, R0, 0, 10), \
  11093. BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
  11094. BPF_JMP_IMM(BPF_JNE, R2, 2, 8), \
  11095. BPF_JMP_IMM(BPF_JNE, R3, 3, 7), \
  11096. BPF_JMP_IMM(BPF_JNE, R4, 4, 6), \
  11097. BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
  11098. BPF_JMP_IMM(BPF_JNE, R6, 6, 4), \
  11099. BPF_JMP_IMM(BPF_JNE, R7, 7, 3), \
  11100. BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
  11101. BPF_JMP_IMM(BPF_JNE, R9, 9, 1), \
  11102. BPF_ALU64_IMM(BPF_MOV, R0, 1), \
  11103. BPF_EXIT_INSN(), \
  11104. }, \
  11105. INTERNAL, \
  11106. { }, \
  11107. { { 0, 1 } }, \
  11108. .stack_depth = 8, \
  11109. }
  11110. /* 64-bit atomic operations, register clobbering */
  11111. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
  11112. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
  11113. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
  11114. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
  11115. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
  11116. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
  11117. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
  11118. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
  11119. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
  11120. BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
  11121. /* 32-bit atomic operations, register clobbering */
  11122. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
  11123. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
  11124. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
  11125. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
  11126. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
  11127. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
  11128. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
  11129. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
  11130. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
  11131. BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
  11132. #undef BPF_TEST_CLOBBER_ATOMIC
	/* Checking that ALU32 src is not zero extended in place */
#define BPF_ALU32_SRC_ZEXT(op)					\
	{							\
		"ALU32_" #op "_X: src preserved in zext",	\
		.u.insns_int = {				\
			/* R1 is the src operand under test; R0 keeps a copy */ \
			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
			BPF_ALU32_REG(BPF_##op, R2, R1),	\
			/* Fold R0 - R1 into 32 bits; result is 0 iff R1 intact */ \
			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
			BPF_ALU64_REG(BPF_OR, R0, R1),		\
			BPF_EXIT_INSN(),			\
		},						\
		INTERNAL,					\
		{ },						\
		{ { 0, 0 } },					\
	}
	BPF_ALU32_SRC_ZEXT(MOV),
	BPF_ALU32_SRC_ZEXT(AND),
	BPF_ALU32_SRC_ZEXT(OR),
	BPF_ALU32_SRC_ZEXT(XOR),
	BPF_ALU32_SRC_ZEXT(ADD),
	BPF_ALU32_SRC_ZEXT(SUB),
	BPF_ALU32_SRC_ZEXT(MUL),
	BPF_ALU32_SRC_ZEXT(DIV),
	BPF_ALU32_SRC_ZEXT(MOD),
#undef BPF_ALU32_SRC_ZEXT
	/* Checking that ATOMIC32 src is not zero extended in place */
#define BPF_ATOMIC32_SRC_ZEXT(op)					\
	{								\
		"ATOMIC_W_" #op ": src preserved in zext",		\
		.u.insns_int = {					\
			/* R1 is the src operand under test; R0 keeps a copy */ \
			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
			/* Fold R0 - R1 into 32 bits; result is 0 iff R1 intact */ \
			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
			BPF_ALU64_REG(BPF_OR, R0, R1),			\
			BPF_EXIT_INSN(),				\
		},							\
		INTERNAL,						\
		{ },							\
		{ { 0, 0 } },						\
		.stack_depth = 8,					\
	}
	BPF_ATOMIC32_SRC_ZEXT(ADD),
	BPF_ATOMIC32_SRC_ZEXT(AND),
	BPF_ATOMIC32_SRC_ZEXT(OR),
	BPF_ATOMIC32_SRC_ZEXT(XOR),
#undef BPF_ATOMIC32_SRC_ZEXT
  11187. /* Checking that CMPXCHG32 src is not zero extended in place */
  11188. {
  11189. "ATOMIC_W_CMPXCHG: src preserved in zext",
  11190. .u.insns_int = {
  11191. BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
  11192. BPF_ALU64_REG(BPF_MOV, R2, R1),
  11193. BPF_ALU64_REG(BPF_MOV, R0, 0),
  11194. BPF_ST_MEM(BPF_W, R10, -4, 0),
  11195. BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
  11196. BPF_ALU64_REG(BPF_SUB, R1, R2),
  11197. BPF_ALU64_REG(BPF_MOV, R2, R1),
  11198. BPF_ALU64_IMM(BPF_RSH, R2, 32),
  11199. BPF_ALU64_REG(BPF_OR, R1, R2),
  11200. BPF_ALU64_REG(BPF_MOV, R0, R1),
  11201. BPF_EXIT_INSN(),
  11202. },
  11203. INTERNAL,
  11204. { },
  11205. { { 0, 0 } },
  11206. .stack_depth = 8,
  11207. },
  11208. /* Checking that JMP32 immediate src is not zero extended in place */
  11209. #define BPF_JMP32_IMM_ZEXT(op) \
  11210. { \
  11211. "JMP32_" #op "_K: operand preserved in zext", \
  11212. .u.insns_int = { \
  11213. BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
  11214. BPF_ALU64_REG(BPF_MOV, R1, R0), \
  11215. BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
  11216. BPF_JMP_A(0), /* Nop */ \
  11217. BPF_ALU64_REG(BPF_SUB, R0, R1), \
  11218. BPF_ALU64_REG(BPF_MOV, R1, R0), \
  11219. BPF_ALU64_IMM(BPF_RSH, R1, 32), \
  11220. BPF_ALU64_REG(BPF_OR, R0, R1), \
  11221. BPF_EXIT_INSN(), \
  11222. }, \
  11223. INTERNAL, \
  11224. { }, \
  11225. { { 0, 0 } }, \
  11226. }
  11227. BPF_JMP32_IMM_ZEXT(JEQ),
  11228. BPF_JMP32_IMM_ZEXT(JNE),
  11229. BPF_JMP32_IMM_ZEXT(JSET),
  11230. BPF_JMP32_IMM_ZEXT(JGT),
  11231. BPF_JMP32_IMM_ZEXT(JGE),
  11232. BPF_JMP32_IMM_ZEXT(JLT),
  11233. BPF_JMP32_IMM_ZEXT(JLE),
  11234. BPF_JMP32_IMM_ZEXT(JSGT),
  11235. BPF_JMP32_IMM_ZEXT(JSGE),
  11236. BPF_JMP32_IMM_ZEXT(JSGT),
  11237. BPF_JMP32_IMM_ZEXT(JSLT),
  11238. BPF_JMP32_IMM_ZEXT(JSLE),
  11239. #undef BPF_JMP2_IMM_ZEXT
  11240. /* Checking that JMP32 dst & src are not zero extended in place */
  11241. #define BPF_JMP32_REG_ZEXT(op) \
  11242. { \
  11243. "JMP32_" #op "_X: operands preserved in zext", \
  11244. .u.insns_int = { \
  11245. BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
  11246. BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
  11247. BPF_ALU64_REG(BPF_MOV, R2, R0), \
  11248. BPF_ALU64_REG(BPF_MOV, R3, R1), \
  11249. BPF_JMP32_IMM(BPF_##op, R0, R1, 1), \
  11250. BPF_JMP_A(0), /* Nop */ \
  11251. BPF_ALU64_REG(BPF_SUB, R0, R2), \
  11252. BPF_ALU64_REG(BPF_SUB, R1, R3), \
  11253. BPF_ALU64_REG(BPF_OR, R0, R1), \
  11254. BPF_ALU64_REG(BPF_MOV, R1, R0), \
  11255. BPF_ALU64_IMM(BPF_RSH, R1, 32), \
  11256. BPF_ALU64_REG(BPF_OR, R0, R1), \
  11257. BPF_EXIT_INSN(), \
  11258. }, \
  11259. INTERNAL, \
  11260. { }, \
  11261. { { 0, 0 } }, \
  11262. }
  11263. BPF_JMP32_REG_ZEXT(JEQ),
  11264. BPF_JMP32_REG_ZEXT(JNE),
  11265. BPF_JMP32_REG_ZEXT(JSET),
  11266. BPF_JMP32_REG_ZEXT(JGT),
  11267. BPF_JMP32_REG_ZEXT(JGE),
  11268. BPF_JMP32_REG_ZEXT(JLT),
  11269. BPF_JMP32_REG_ZEXT(JLE),
  11270. BPF_JMP32_REG_ZEXT(JSGT),
  11271. BPF_JMP32_REG_ZEXT(JSGE),
  11272. BPF_JMP32_REG_ZEXT(JSGT),
  11273. BPF_JMP32_REG_ZEXT(JSLT),
  11274. BPF_JMP32_REG_ZEXT(JSLE),
  11275. #undef BPF_JMP2_REG_ZEXT
  11276. /* ALU64 K register combinations */
  11277. {
  11278. "ALU64_MOV_K: registers",
  11279. { },
  11280. INTERNAL,
  11281. { },
  11282. { { 0, 1 } },
  11283. .fill_helper = bpf_fill_alu64_mov_imm_regs,
  11284. },
  11285. {
  11286. "ALU64_AND_K: registers",
  11287. { },
  11288. INTERNAL,
  11289. { },
  11290. { { 0, 1 } },
  11291. .fill_helper = bpf_fill_alu64_and_imm_regs,
  11292. },
  11293. {
  11294. "ALU64_OR_K: registers",
  11295. { },
  11296. INTERNAL,
  11297. { },
  11298. { { 0, 1 } },
  11299. .fill_helper = bpf_fill_alu64_or_imm_regs,
  11300. },
  11301. {
  11302. "ALU64_XOR_K: registers",
  11303. { },
  11304. INTERNAL,
  11305. { },
  11306. { { 0, 1 } },
  11307. .fill_helper = bpf_fill_alu64_xor_imm_regs,
  11308. },
  11309. {
  11310. "ALU64_LSH_K: registers",
  11311. { },
  11312. INTERNAL,
  11313. { },
  11314. { { 0, 1 } },
  11315. .fill_helper = bpf_fill_alu64_lsh_imm_regs,
  11316. },
  11317. {
  11318. "ALU64_RSH_K: registers",
  11319. { },
  11320. INTERNAL,
  11321. { },
  11322. { { 0, 1 } },
  11323. .fill_helper = bpf_fill_alu64_rsh_imm_regs,
  11324. },
  11325. {
  11326. "ALU64_ARSH_K: registers",
  11327. { },
  11328. INTERNAL,
  11329. { },
  11330. { { 0, 1 } },
  11331. .fill_helper = bpf_fill_alu64_arsh_imm_regs,
  11332. },
  11333. {
  11334. "ALU64_ADD_K: registers",
  11335. { },
  11336. INTERNAL,
  11337. { },
  11338. { { 0, 1 } },
  11339. .fill_helper = bpf_fill_alu64_add_imm_regs,
  11340. },
  11341. {
  11342. "ALU64_SUB_K: registers",
  11343. { },
  11344. INTERNAL,
  11345. { },
  11346. { { 0, 1 } },
  11347. .fill_helper = bpf_fill_alu64_sub_imm_regs,
  11348. },
  11349. {
  11350. "ALU64_MUL_K: registers",
  11351. { },
  11352. INTERNAL,
  11353. { },
  11354. { { 0, 1 } },
  11355. .fill_helper = bpf_fill_alu64_mul_imm_regs,
  11356. },
  11357. {
  11358. "ALU64_DIV_K: registers",
  11359. { },
  11360. INTERNAL,
  11361. { },
  11362. { { 0, 1 } },
  11363. .fill_helper = bpf_fill_alu64_div_imm_regs,
  11364. },
  11365. {
  11366. "ALU64_MOD_K: registers",
  11367. { },
  11368. INTERNAL,
  11369. { },
  11370. { { 0, 1 } },
  11371. .fill_helper = bpf_fill_alu64_mod_imm_regs,
  11372. },
  11373. /* ALU32 K registers */
  11374. {
  11375. "ALU32_MOV_K: registers",
  11376. { },
  11377. INTERNAL,
  11378. { },
  11379. { { 0, 1 } },
  11380. .fill_helper = bpf_fill_alu32_mov_imm_regs,
  11381. },
  11382. {
  11383. "ALU32_AND_K: registers",
  11384. { },
  11385. INTERNAL,
  11386. { },
  11387. { { 0, 1 } },
  11388. .fill_helper = bpf_fill_alu32_and_imm_regs,
  11389. },
  11390. {
  11391. "ALU32_OR_K: registers",
  11392. { },
  11393. INTERNAL,
  11394. { },
  11395. { { 0, 1 } },
  11396. .fill_helper = bpf_fill_alu32_or_imm_regs,
  11397. },
  11398. {
  11399. "ALU32_XOR_K: registers",
  11400. { },
  11401. INTERNAL,
  11402. { },
  11403. { { 0, 1 } },
  11404. .fill_helper = bpf_fill_alu32_xor_imm_regs,
  11405. },
  11406. {
  11407. "ALU32_LSH_K: registers",
  11408. { },
  11409. INTERNAL,
  11410. { },
  11411. { { 0, 1 } },
  11412. .fill_helper = bpf_fill_alu32_lsh_imm_regs,
  11413. },
  11414. {
  11415. "ALU32_RSH_K: registers",
  11416. { },
  11417. INTERNAL,
  11418. { },
  11419. { { 0, 1 } },
  11420. .fill_helper = bpf_fill_alu32_rsh_imm_regs,
  11421. },
  11422. {
  11423. "ALU32_ARSH_K: registers",
  11424. { },
  11425. INTERNAL,
  11426. { },
  11427. { { 0, 1 } },
  11428. .fill_helper = bpf_fill_alu32_arsh_imm_regs,
  11429. },
  11430. {
  11431. "ALU32_ADD_K: registers",
  11432. { },
  11433. INTERNAL,
  11434. { },
  11435. { { 0, 1 } },
  11436. .fill_helper = bpf_fill_alu32_add_imm_regs,
  11437. },
  11438. {
  11439. "ALU32_SUB_K: registers",
  11440. { },
  11441. INTERNAL,
  11442. { },
  11443. { { 0, 1 } },
  11444. .fill_helper = bpf_fill_alu32_sub_imm_regs,
  11445. },
  11446. {
  11447. "ALU32_MUL_K: registers",
  11448. { },
  11449. INTERNAL,
  11450. { },
  11451. { { 0, 1 } },
  11452. .fill_helper = bpf_fill_alu32_mul_imm_regs,
  11453. },
  11454. {
  11455. "ALU32_DIV_K: registers",
  11456. { },
  11457. INTERNAL,
  11458. { },
  11459. { { 0, 1 } },
  11460. .fill_helper = bpf_fill_alu32_div_imm_regs,
  11461. },
  11462. {
  11463. "ALU32_MOD_K: registers",
  11464. { },
  11465. INTERNAL,
  11466. { },
  11467. { { 0, 1 } },
  11468. .fill_helper = bpf_fill_alu32_mod_imm_regs,
  11469. },
  11470. /* ALU64 X register combinations */
  11471. {
  11472. "ALU64_MOV_X: register combinations",
  11473. { },
  11474. INTERNAL,
  11475. { },
  11476. { { 0, 1 } },
  11477. .fill_helper = bpf_fill_alu64_mov_reg_pairs,
  11478. },
  11479. {
  11480. "ALU64_AND_X: register combinations",
  11481. { },
  11482. INTERNAL,
  11483. { },
  11484. { { 0, 1 } },
  11485. .fill_helper = bpf_fill_alu64_and_reg_pairs,
  11486. },
  11487. {
  11488. "ALU64_OR_X: register combinations",
  11489. { },
  11490. INTERNAL,
  11491. { },
  11492. { { 0, 1 } },
  11493. .fill_helper = bpf_fill_alu64_or_reg_pairs,
  11494. },
  11495. {
  11496. "ALU64_XOR_X: register combinations",
  11497. { },
  11498. INTERNAL,
  11499. { },
  11500. { { 0, 1 } },
  11501. .fill_helper = bpf_fill_alu64_xor_reg_pairs,
  11502. },
  11503. {
  11504. "ALU64_LSH_X: register combinations",
  11505. { },
  11506. INTERNAL,
  11507. { },
  11508. { { 0, 1 } },
  11509. .fill_helper = bpf_fill_alu64_lsh_reg_pairs,
  11510. },
  11511. {
  11512. "ALU64_RSH_X: register combinations",
  11513. { },
  11514. INTERNAL,
  11515. { },
  11516. { { 0, 1 } },
  11517. .fill_helper = bpf_fill_alu64_rsh_reg_pairs,
  11518. },
  11519. {
  11520. "ALU64_ARSH_X: register combinations",
  11521. { },
  11522. INTERNAL,
  11523. { },
  11524. { { 0, 1 } },
  11525. .fill_helper = bpf_fill_alu64_arsh_reg_pairs,
  11526. },
  11527. {
  11528. "ALU64_ADD_X: register combinations",
  11529. { },
  11530. INTERNAL,
  11531. { },
  11532. { { 0, 1 } },
  11533. .fill_helper = bpf_fill_alu64_add_reg_pairs,
  11534. },
  11535. {
  11536. "ALU64_SUB_X: register combinations",
  11537. { },
  11538. INTERNAL,
  11539. { },
  11540. { { 0, 1 } },
  11541. .fill_helper = bpf_fill_alu64_sub_reg_pairs,
  11542. },
  11543. {
  11544. "ALU64_MUL_X: register combinations",
  11545. { },
  11546. INTERNAL,
  11547. { },
  11548. { { 0, 1 } },
  11549. .fill_helper = bpf_fill_alu64_mul_reg_pairs,
  11550. },
  11551. {
  11552. "ALU64_DIV_X: register combinations",
  11553. { },
  11554. INTERNAL,
  11555. { },
  11556. { { 0, 1 } },
  11557. .fill_helper = bpf_fill_alu64_div_reg_pairs,
  11558. },
  11559. {
  11560. "ALU64_MOD_X: register combinations",
  11561. { },
  11562. INTERNAL,
  11563. { },
  11564. { { 0, 1 } },
  11565. .fill_helper = bpf_fill_alu64_mod_reg_pairs,
  11566. },
  11567. /* ALU32 X register combinations */
  11568. {
  11569. "ALU32_MOV_X: register combinations",
  11570. { },
  11571. INTERNAL,
  11572. { },
  11573. { { 0, 1 } },
  11574. .fill_helper = bpf_fill_alu32_mov_reg_pairs,
  11575. },
  11576. {
  11577. "ALU32_AND_X: register combinations",
  11578. { },
  11579. INTERNAL,
  11580. { },
  11581. { { 0, 1 } },
  11582. .fill_helper = bpf_fill_alu32_and_reg_pairs,
  11583. },
  11584. {
  11585. "ALU32_OR_X: register combinations",
  11586. { },
  11587. INTERNAL,
  11588. { },
  11589. { { 0, 1 } },
  11590. .fill_helper = bpf_fill_alu32_or_reg_pairs,
  11591. },
  11592. {
  11593. "ALU32_XOR_X: register combinations",
  11594. { },
  11595. INTERNAL,
  11596. { },
  11597. { { 0, 1 } },
  11598. .fill_helper = bpf_fill_alu32_xor_reg_pairs,
  11599. },
  11600. {
  11601. "ALU32_LSH_X: register combinations",
  11602. { },
  11603. INTERNAL,
  11604. { },
  11605. { { 0, 1 } },
  11606. .fill_helper = bpf_fill_alu32_lsh_reg_pairs,
  11607. },
  11608. {
  11609. "ALU32_RSH_X: register combinations",
  11610. { },
  11611. INTERNAL,
  11612. { },
  11613. { { 0, 1 } },
  11614. .fill_helper = bpf_fill_alu32_rsh_reg_pairs,
  11615. },
  11616. {
  11617. "ALU32_ARSH_X: register combinations",
  11618. { },
  11619. INTERNAL,
  11620. { },
  11621. { { 0, 1 } },
  11622. .fill_helper = bpf_fill_alu32_arsh_reg_pairs,
  11623. },
  11624. {
  11625. "ALU32_ADD_X: register combinations",
  11626. { },
  11627. INTERNAL,
  11628. { },
  11629. { { 0, 1 } },
  11630. .fill_helper = bpf_fill_alu32_add_reg_pairs,
  11631. },
  11632. {
  11633. "ALU32_SUB_X: register combinations",
  11634. { },
  11635. INTERNAL,
  11636. { },
  11637. { { 0, 1 } },
  11638. .fill_helper = bpf_fill_alu32_sub_reg_pairs,
  11639. },
  11640. {
  11641. "ALU32_MUL_X: register combinations",
  11642. { },
  11643. INTERNAL,
  11644. { },
  11645. { { 0, 1 } },
  11646. .fill_helper = bpf_fill_alu32_mul_reg_pairs,
  11647. },
  11648. {
  11649. "ALU32_DIV_X: register combinations",
  11650. { },
  11651. INTERNAL,
  11652. { },
  11653. { { 0, 1 } },
  11654. .fill_helper = bpf_fill_alu32_div_reg_pairs,
  11655. },
  11656. {
  11657. "ALU32_MOD_X register combinations",
  11658. { },
  11659. INTERNAL,
  11660. { },
  11661. { { 0, 1 } },
  11662. .fill_helper = bpf_fill_alu32_mod_reg_pairs,
  11663. },
  11664. /* Exhaustive test of ALU64 shift operations */
  11665. {
  11666. "ALU64_LSH_K: all shift values",
  11667. { },
  11668. INTERNAL | FLAG_NO_DATA,
  11669. { },
  11670. { { 0, 1 } },
  11671. .fill_helper = bpf_fill_alu64_lsh_imm,
  11672. },
  11673. {
  11674. "ALU64_RSH_K: all shift values",
  11675. { },
  11676. INTERNAL | FLAG_NO_DATA,
  11677. { },
  11678. { { 0, 1 } },
  11679. .fill_helper = bpf_fill_alu64_rsh_imm,
  11680. },
  11681. {
  11682. "ALU64_ARSH_K: all shift values",
  11683. { },
  11684. INTERNAL | FLAG_NO_DATA,
  11685. { },
  11686. { { 0, 1 } },
  11687. .fill_helper = bpf_fill_alu64_arsh_imm,
  11688. },
  11689. {
  11690. "ALU64_LSH_X: all shift values",
  11691. { },
  11692. INTERNAL | FLAG_NO_DATA,
  11693. { },
  11694. { { 0, 1 } },
  11695. .fill_helper = bpf_fill_alu64_lsh_reg,
  11696. },
  11697. {
  11698. "ALU64_RSH_X: all shift values",
  11699. { },
  11700. INTERNAL | FLAG_NO_DATA,
  11701. { },
  11702. { { 0, 1 } },
  11703. .fill_helper = bpf_fill_alu64_rsh_reg,
  11704. },
  11705. {
  11706. "ALU64_ARSH_X: all shift values",
  11707. { },
  11708. INTERNAL | FLAG_NO_DATA,
  11709. { },
  11710. { { 0, 1 } },
  11711. .fill_helper = bpf_fill_alu64_arsh_reg,
  11712. },
  11713. /* Exhaustive test of ALU32 shift operations */
  11714. {
  11715. "ALU32_LSH_K: all shift values",
  11716. { },
  11717. INTERNAL | FLAG_NO_DATA,
  11718. { },
  11719. { { 0, 1 } },
  11720. .fill_helper = bpf_fill_alu32_lsh_imm,
  11721. },
  11722. {
  11723. "ALU32_RSH_K: all shift values",
  11724. { },
  11725. INTERNAL | FLAG_NO_DATA,
  11726. { },
  11727. { { 0, 1 } },
  11728. .fill_helper = bpf_fill_alu32_rsh_imm,
  11729. },
  11730. {
  11731. "ALU32_ARSH_K: all shift values",
  11732. { },
  11733. INTERNAL | FLAG_NO_DATA,
  11734. { },
  11735. { { 0, 1 } },
  11736. .fill_helper = bpf_fill_alu32_arsh_imm,
  11737. },
  11738. {
  11739. "ALU32_LSH_X: all shift values",
  11740. { },
  11741. INTERNAL | FLAG_NO_DATA,
  11742. { },
  11743. { { 0, 1 } },
  11744. .fill_helper = bpf_fill_alu32_lsh_reg,
  11745. },
  11746. {
  11747. "ALU32_RSH_X: all shift values",
  11748. { },
  11749. INTERNAL | FLAG_NO_DATA,
  11750. { },
  11751. { { 0, 1 } },
  11752. .fill_helper = bpf_fill_alu32_rsh_reg,
  11753. },
  11754. {
  11755. "ALU32_ARSH_X: all shift values",
  11756. { },
  11757. INTERNAL | FLAG_NO_DATA,
  11758. { },
  11759. { { 0, 1 } },
  11760. .fill_helper = bpf_fill_alu32_arsh_reg,
  11761. },
  11762. /*
  11763. * Exhaustive test of ALU64 shift operations when
  11764. * source and destination register are the same.
  11765. */
  11766. {
  11767. "ALU64_LSH_X: all shift values with the same register",
  11768. { },
  11769. INTERNAL | FLAG_NO_DATA,
  11770. { },
  11771. { { 0, 1 } },
  11772. .fill_helper = bpf_fill_alu64_lsh_same_reg,
  11773. },
  11774. {
  11775. "ALU64_RSH_X: all shift values with the same register",
  11776. { },
  11777. INTERNAL | FLAG_NO_DATA,
  11778. { },
  11779. { { 0, 1 } },
  11780. .fill_helper = bpf_fill_alu64_rsh_same_reg,
  11781. },
  11782. {
  11783. "ALU64_ARSH_X: all shift values with the same register",
  11784. { },
  11785. INTERNAL | FLAG_NO_DATA,
  11786. { },
  11787. { { 0, 1 } },
  11788. .fill_helper = bpf_fill_alu64_arsh_same_reg,
  11789. },
  11790. /*
  11791. * Exhaustive test of ALU32 shift operations when
  11792. * source and destination register are the same.
  11793. */
  11794. {
  11795. "ALU32_LSH_X: all shift values with the same register",
  11796. { },
  11797. INTERNAL | FLAG_NO_DATA,
  11798. { },
  11799. { { 0, 1 } },
  11800. .fill_helper = bpf_fill_alu32_lsh_same_reg,
  11801. },
  11802. {
  11803. "ALU32_RSH_X: all shift values with the same register",
  11804. { },
  11805. INTERNAL | FLAG_NO_DATA,
  11806. { },
  11807. { { 0, 1 } },
  11808. .fill_helper = bpf_fill_alu32_rsh_same_reg,
  11809. },
  11810. {
  11811. "ALU32_ARSH_X: all shift values with the same register",
  11812. { },
  11813. INTERNAL | FLAG_NO_DATA,
  11814. { },
  11815. { { 0, 1 } },
  11816. .fill_helper = bpf_fill_alu32_arsh_same_reg,
  11817. },
  11818. /* ALU64 immediate magnitudes */
  11819. {
  11820. "ALU64_MOV_K: all immediate value magnitudes",
  11821. { },
  11822. INTERNAL | FLAG_NO_DATA,
  11823. { },
  11824. { { 0, 1 } },
  11825. .fill_helper = bpf_fill_alu64_mov_imm,
  11826. .nr_testruns = NR_PATTERN_RUNS,
  11827. },
  11828. {
  11829. "ALU64_AND_K: all immediate value magnitudes",
  11830. { },
  11831. INTERNAL | FLAG_NO_DATA,
  11832. { },
  11833. { { 0, 1 } },
  11834. .fill_helper = bpf_fill_alu64_and_imm,
  11835. .nr_testruns = NR_PATTERN_RUNS,
  11836. },
  11837. {
  11838. "ALU64_OR_K: all immediate value magnitudes",
  11839. { },
  11840. INTERNAL | FLAG_NO_DATA,
  11841. { },
  11842. { { 0, 1 } },
  11843. .fill_helper = bpf_fill_alu64_or_imm,
  11844. .nr_testruns = NR_PATTERN_RUNS,
  11845. },
  11846. {
  11847. "ALU64_XOR_K: all immediate value magnitudes",
  11848. { },
  11849. INTERNAL | FLAG_NO_DATA,
  11850. { },
  11851. { { 0, 1 } },
  11852. .fill_helper = bpf_fill_alu64_xor_imm,
  11853. .nr_testruns = NR_PATTERN_RUNS,
  11854. },
  11855. {
  11856. "ALU64_ADD_K: all immediate value magnitudes",
  11857. { },
  11858. INTERNAL | FLAG_NO_DATA,
  11859. { },
  11860. { { 0, 1 } },
  11861. .fill_helper = bpf_fill_alu64_add_imm,
  11862. .nr_testruns = NR_PATTERN_RUNS,
  11863. },
  11864. {
  11865. "ALU64_SUB_K: all immediate value magnitudes",
  11866. { },
  11867. INTERNAL | FLAG_NO_DATA,
  11868. { },
  11869. { { 0, 1 } },
  11870. .fill_helper = bpf_fill_alu64_sub_imm,
  11871. .nr_testruns = NR_PATTERN_RUNS,
  11872. },
  11873. {
  11874. "ALU64_MUL_K: all immediate value magnitudes",
  11875. { },
  11876. INTERNAL | FLAG_NO_DATA,
  11877. { },
  11878. { { 0, 1 } },
  11879. .fill_helper = bpf_fill_alu64_mul_imm,
  11880. .nr_testruns = NR_PATTERN_RUNS,
  11881. },
  11882. {
  11883. "ALU64_DIV_K: all immediate value magnitudes",
  11884. { },
  11885. INTERNAL | FLAG_NO_DATA,
  11886. { },
  11887. { { 0, 1 } },
  11888. .fill_helper = bpf_fill_alu64_div_imm,
  11889. .nr_testruns = NR_PATTERN_RUNS,
  11890. },
  11891. {
  11892. "ALU64_MOD_K: all immediate value magnitudes",
  11893. { },
  11894. INTERNAL | FLAG_NO_DATA,
  11895. { },
  11896. { { 0, 1 } },
  11897. .fill_helper = bpf_fill_alu64_mod_imm,
  11898. .nr_testruns = NR_PATTERN_RUNS,
  11899. },
  11900. /* ALU32 immediate magnitudes */
  11901. {
  11902. "ALU32_MOV_K: all immediate value magnitudes",
  11903. { },
  11904. INTERNAL | FLAG_NO_DATA,
  11905. { },
  11906. { { 0, 1 } },
  11907. .fill_helper = bpf_fill_alu32_mov_imm,
  11908. .nr_testruns = NR_PATTERN_RUNS,
  11909. },
  11910. {
  11911. "ALU32_AND_K: all immediate value magnitudes",
  11912. { },
  11913. INTERNAL | FLAG_NO_DATA,
  11914. { },
  11915. { { 0, 1 } },
  11916. .fill_helper = bpf_fill_alu32_and_imm,
  11917. .nr_testruns = NR_PATTERN_RUNS,
  11918. },
  11919. {
  11920. "ALU32_OR_K: all immediate value magnitudes",
  11921. { },
  11922. INTERNAL | FLAG_NO_DATA,
  11923. { },
  11924. { { 0, 1 } },
  11925. .fill_helper = bpf_fill_alu32_or_imm,
  11926. .nr_testruns = NR_PATTERN_RUNS,
  11927. },
  11928. {
  11929. "ALU32_XOR_K: all immediate value magnitudes",
  11930. { },
  11931. INTERNAL | FLAG_NO_DATA,
  11932. { },
  11933. { { 0, 1 } },
  11934. .fill_helper = bpf_fill_alu32_xor_imm,
  11935. .nr_testruns = NR_PATTERN_RUNS,
  11936. },
  11937. {
  11938. "ALU32_ADD_K: all immediate value magnitudes",
  11939. { },
  11940. INTERNAL | FLAG_NO_DATA,
  11941. { },
  11942. { { 0, 1 } },
  11943. .fill_helper = bpf_fill_alu32_add_imm,
  11944. .nr_testruns = NR_PATTERN_RUNS,
  11945. },
  11946. {
  11947. "ALU32_SUB_K: all immediate value magnitudes",
  11948. { },
  11949. INTERNAL | FLAG_NO_DATA,
  11950. { },
  11951. { { 0, 1 } },
  11952. .fill_helper = bpf_fill_alu32_sub_imm,
  11953. .nr_testruns = NR_PATTERN_RUNS,
  11954. },
  11955. {
  11956. "ALU32_MUL_K: all immediate value magnitudes",
  11957. { },
  11958. INTERNAL | FLAG_NO_DATA,
  11959. { },
  11960. { { 0, 1 } },
  11961. .fill_helper = bpf_fill_alu32_mul_imm,
  11962. .nr_testruns = NR_PATTERN_RUNS,
  11963. },
  11964. {
  11965. "ALU32_DIV_K: all immediate value magnitudes",
  11966. { },
  11967. INTERNAL | FLAG_NO_DATA,
  11968. { },
  11969. { { 0, 1 } },
  11970. .fill_helper = bpf_fill_alu32_div_imm,
  11971. .nr_testruns = NR_PATTERN_RUNS,
  11972. },
  11973. {
  11974. "ALU32_MOD_K: all immediate value magnitudes",
  11975. { },
  11976. INTERNAL | FLAG_NO_DATA,
  11977. { },
  11978. { { 0, 1 } },
  11979. .fill_helper = bpf_fill_alu32_mod_imm,
  11980. .nr_testruns = NR_PATTERN_RUNS,
  11981. },
  11982. /* ALU64 register magnitudes */
  11983. {
  11984. "ALU64_MOV_X: all register value magnitudes",
  11985. { },
  11986. INTERNAL | FLAG_NO_DATA,
  11987. { },
  11988. { { 0, 1 } },
  11989. .fill_helper = bpf_fill_alu64_mov_reg,
  11990. .nr_testruns = NR_PATTERN_RUNS,
  11991. },
  11992. {
  11993. "ALU64_AND_X: all register value magnitudes",
  11994. { },
  11995. INTERNAL | FLAG_NO_DATA,
  11996. { },
  11997. { { 0, 1 } },
  11998. .fill_helper = bpf_fill_alu64_and_reg,
  11999. .nr_testruns = NR_PATTERN_RUNS,
  12000. },
  12001. {
  12002. "ALU64_OR_X: all register value magnitudes",
  12003. { },
  12004. INTERNAL | FLAG_NO_DATA,
  12005. { },
  12006. { { 0, 1 } },
  12007. .fill_helper = bpf_fill_alu64_or_reg,
  12008. .nr_testruns = NR_PATTERN_RUNS,
  12009. },
  12010. {
  12011. "ALU64_XOR_X: all register value magnitudes",
  12012. { },
  12013. INTERNAL | FLAG_NO_DATA,
  12014. { },
  12015. { { 0, 1 } },
  12016. .fill_helper = bpf_fill_alu64_xor_reg,
  12017. .nr_testruns = NR_PATTERN_RUNS,
  12018. },
  12019. {
  12020. "ALU64_ADD_X: all register value magnitudes",
  12021. { },
  12022. INTERNAL | FLAG_NO_DATA,
  12023. { },
  12024. { { 0, 1 } },
  12025. .fill_helper = bpf_fill_alu64_add_reg,
  12026. .nr_testruns = NR_PATTERN_RUNS,
  12027. },
  12028. {
  12029. "ALU64_SUB_X: all register value magnitudes",
  12030. { },
  12031. INTERNAL | FLAG_NO_DATA,
  12032. { },
  12033. { { 0, 1 } },
  12034. .fill_helper = bpf_fill_alu64_sub_reg,
  12035. .nr_testruns = NR_PATTERN_RUNS,
  12036. },
  12037. {
  12038. "ALU64_MUL_X: all register value magnitudes",
  12039. { },
  12040. INTERNAL | FLAG_NO_DATA,
  12041. { },
  12042. { { 0, 1 } },
  12043. .fill_helper = bpf_fill_alu64_mul_reg,
  12044. .nr_testruns = NR_PATTERN_RUNS,
  12045. },
  12046. {
  12047. "ALU64_DIV_X: all register value magnitudes",
  12048. { },
  12049. INTERNAL | FLAG_NO_DATA,
  12050. { },
  12051. { { 0, 1 } },
  12052. .fill_helper = bpf_fill_alu64_div_reg,
  12053. .nr_testruns = NR_PATTERN_RUNS,
  12054. },
  12055. {
  12056. "ALU64_MOD_X: all register value magnitudes",
  12057. { },
  12058. INTERNAL | FLAG_NO_DATA,
  12059. { },
  12060. { { 0, 1 } },
  12061. .fill_helper = bpf_fill_alu64_mod_reg,
  12062. .nr_testruns = NR_PATTERN_RUNS,
  12063. },
  12064. /* ALU32 register magnitudes */
  12065. {
  12066. "ALU32_MOV_X: all register value magnitudes",
  12067. { },
  12068. INTERNAL | FLAG_NO_DATA,
  12069. { },
  12070. { { 0, 1 } },
  12071. .fill_helper = bpf_fill_alu32_mov_reg,
  12072. .nr_testruns = NR_PATTERN_RUNS,
  12073. },
  12074. {
  12075. "ALU32_AND_X: all register value magnitudes",
  12076. { },
  12077. INTERNAL | FLAG_NO_DATA,
  12078. { },
  12079. { { 0, 1 } },
  12080. .fill_helper = bpf_fill_alu32_and_reg,
  12081. .nr_testruns = NR_PATTERN_RUNS,
  12082. },
  12083. {
  12084. "ALU32_OR_X: all register value magnitudes",
  12085. { },
  12086. INTERNAL | FLAG_NO_DATA,
  12087. { },
  12088. { { 0, 1 } },
  12089. .fill_helper = bpf_fill_alu32_or_reg,
  12090. .nr_testruns = NR_PATTERN_RUNS,
  12091. },
  12092. {
  12093. "ALU32_XOR_X: all register value magnitudes",
  12094. { },
  12095. INTERNAL | FLAG_NO_DATA,
  12096. { },
  12097. { { 0, 1 } },
  12098. .fill_helper = bpf_fill_alu32_xor_reg,
  12099. .nr_testruns = NR_PATTERN_RUNS,
  12100. },
  12101. {
  12102. "ALU32_ADD_X: all register value magnitudes",
  12103. { },
  12104. INTERNAL | FLAG_NO_DATA,
  12105. { },
  12106. { { 0, 1 } },
  12107. .fill_helper = bpf_fill_alu32_add_reg,
  12108. .nr_testruns = NR_PATTERN_RUNS,
  12109. },
  12110. {
  12111. "ALU32_SUB_X: all register value magnitudes",
  12112. { },
  12113. INTERNAL | FLAG_NO_DATA,
  12114. { },
  12115. { { 0, 1 } },
  12116. .fill_helper = bpf_fill_alu32_sub_reg,
  12117. .nr_testruns = NR_PATTERN_RUNS,
  12118. },
  12119. {
  12120. "ALU32_MUL_X: all register value magnitudes",
  12121. { },
  12122. INTERNAL | FLAG_NO_DATA,
  12123. { },
  12124. { { 0, 1 } },
  12125. .fill_helper = bpf_fill_alu32_mul_reg,
  12126. .nr_testruns = NR_PATTERN_RUNS,
  12127. },
  12128. {
  12129. "ALU32_DIV_X: all register value magnitudes",
  12130. { },
  12131. INTERNAL | FLAG_NO_DATA,
  12132. { },
  12133. { { 0, 1 } },
  12134. .fill_helper = bpf_fill_alu32_div_reg,
  12135. .nr_testruns = NR_PATTERN_RUNS,
  12136. },
  12137. {
  12138. "ALU32_MOD_X: all register value magnitudes",
  12139. { },
  12140. INTERNAL | FLAG_NO_DATA,
  12141. { },
  12142. { { 0, 1 } },
  12143. .fill_helper = bpf_fill_alu32_mod_reg,
  12144. .nr_testruns = NR_PATTERN_RUNS,
  12145. },
  12146. /* LD_IMM64 immediate magnitudes and byte patterns */
  12147. {
  12148. "LD_IMM64: all immediate value magnitudes",
  12149. { },
  12150. INTERNAL | FLAG_NO_DATA,
  12151. { },
  12152. { { 0, 1 } },
  12153. .fill_helper = bpf_fill_ld_imm64_magn,
  12154. },
  12155. {
  12156. "LD_IMM64: checker byte patterns",
  12157. { },
  12158. INTERNAL | FLAG_NO_DATA,
  12159. { },
  12160. { { 0, 1 } },
  12161. .fill_helper = bpf_fill_ld_imm64_checker,
  12162. },
  12163. {
  12164. "LD_IMM64: random positive and zero byte patterns",
  12165. { },
  12166. INTERNAL | FLAG_NO_DATA,
  12167. { },
  12168. { { 0, 1 } },
  12169. .fill_helper = bpf_fill_ld_imm64_pos_zero,
  12170. },
  12171. {
  12172. "LD_IMM64: random negative and zero byte patterns",
  12173. { },
  12174. INTERNAL | FLAG_NO_DATA,
  12175. { },
  12176. { { 0, 1 } },
  12177. .fill_helper = bpf_fill_ld_imm64_neg_zero,
  12178. },
  12179. {
  12180. "LD_IMM64: random positive and negative byte patterns",
  12181. { },
  12182. INTERNAL | FLAG_NO_DATA,
  12183. { },
  12184. { { 0, 1 } },
  12185. .fill_helper = bpf_fill_ld_imm64_pos_neg,
  12186. },
	/* 64-bit ATOMIC register combinations */
	/*
	 * These cases have no static insn array; .fill_helper generates the
	 * program at test time, exercising one 64-bit (BPF_DW) atomic opcode
	 * over register operand combinations. Each expects result 1 with no
	 * input data ({ 0, 1 }), and reserves 8 bytes of BPF stack for the
	 * atomic memory operand (.stack_depth = 8).
	 */
	{
		"ATOMIC_DW_ADD: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_add_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_AND: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_and_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_OR: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_or_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_XOR: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xor_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_ADD_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_AND_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_OR_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_XOR_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_XCHG: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_DW_CMPXCHG: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
		.stack_depth = 8,
	},
	/* 32-bit ATOMIC register combinations */
	/*
	 * Same shape as the 64-bit group above, but for 32-bit (BPF_W)
	 * atomics via the bpf_fill_atomic32_*_reg_pairs helpers.
	 */
	{
		"ATOMIC_W_ADD: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_add_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_AND: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_and_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_OR: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_or_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_XOR: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xor_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_ADD_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_AND_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_OR_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_XOR_FETCH: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_XCHG: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
		.stack_depth = 8,
	},
	{
		"ATOMIC_W_CMPXCHG: register combinations",
		{ },
		INTERNAL,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
		.stack_depth = 8,
	},
	/* 64-bit ATOMIC magnitudes */
	/*
	 * Generated 64-bit atomic tests covering a range of operand
	 * magnitudes; each runs NR_PATTERN_RUNS times. FLAG_NO_DATA marks
	 * the case as needing no packet data.
	 */
	{
		"ATOMIC_DW_ADD: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_add,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_AND: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_and,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_OR: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_or,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_XOR: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xor,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_ADD_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_add_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_AND_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_and_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_OR_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_or_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_XOR_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xor_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_XCHG: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic64_xchg,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_DW_CMPXCHG: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		/* Note: cmpxchg uses a differently-named helper than its siblings */
		.fill_helper = bpf_fill_cmpxchg64,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* 32-bit atomic magnitudes */
	/*
	 * 32-bit (ATOMIC_W) counterpart of the group above. The section
	 * comment previously said "64-bit" — every case here uses
	 * bpf_fill_atomic32_* helpers, so it is the 32-bit set.
	 */
	{
		"ATOMIC_W_ADD: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_add,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_AND: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_and,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_OR: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_or,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_XOR: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xor,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_ADD_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_add_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_AND_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_and_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_OR_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_or_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_XOR_FETCH: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xor_fetch,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_XCHG: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_atomic32_xchg,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"ATOMIC_W_CMPXCHG: all operand magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		/* Note: cmpxchg uses a differently-named helper than its siblings */
		.fill_helper = bpf_fill_cmpxchg32,
		.stack_depth = 8,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* JMP immediate magnitudes */
	/*
	 * Generated 64-bit conditional jumps against immediate operands of
	 * varying magnitude, one case per JMP condition code. No static
	 * insns; no .stack_depth needed (no memory operand).
	 */
	{
		"JMP_JSET_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jset_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JEQ_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jeq_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JNE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jne_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JGT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jgt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JGE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jge_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JLT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jlt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JLE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jle_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSGT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsgt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSGE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsge_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSLT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jslt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSLE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsle_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* JMP register magnitudes */
	/*
	 * Same conditions as the immediate group above, but with a register
	 * source operand (the _X forms) via the bpf_fill_jmp_*_reg helpers.
	 */
	{
		"JMP_JSET_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jset_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JEQ_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jeq_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JNE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jne_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JGT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jgt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JGE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jge_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JLT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jlt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JLE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jle_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSGT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsgt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSGE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsge_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSLT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jslt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP_JSLE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp_jsle_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* JMP32 immediate magnitudes */
	/*
	 * 32-bit conditional jump (JMP32) cases with immediate operands,
	 * mirroring the 64-bit JMP immediate group above.
	 */
	{
		"JMP32_JSET_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jset_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JEQ_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jeq_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JNE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jne_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JGT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jgt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JGE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jge_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JLT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jlt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JLE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jle_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSGT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsgt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSGE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsge_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSLT_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jslt_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSLE_K: all immediate value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsle_imm,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* JMP32 register magnitudes */
	/*
	 * 32-bit conditional jump (JMP32) cases with register source
	 * operands, mirroring the 64-bit JMP register group above.
	 */
	{
		"JMP32_JSET_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jset_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JEQ_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jeq_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JNE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jne_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JGT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jgt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JGE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jge_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JLT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jlt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JLE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jle_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSGT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsgt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSGE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsge_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSLT_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jslt_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	{
		"JMP32_JSLE_X: all register value magnitudes",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_jmp32_jsle_reg,
		.nr_testruns = NR_PATTERN_RUNS,
	},
	/* Conditional jumps with constant decision */
	/*
	 * These programs set R0 = 1, execute a conditional branch whose
	 * outcome is decidable from the operands alone (extreme immediates,
	 * or identical src/dst registers), and clear R0 to 0 if the branch
	 * falls through. "always taken" cases expect 1, "never taken" cases
	 * expect 0 — catching JITs that constant-fold these branches wrongly.
	 */
	{
		"JMP_JSET_K: imm = 0 -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			/* R1 & 0 == 0, so JSET can never fire */
			BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JLT_K: imm = 0 -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			/* unsigned R1 < 0 is impossible */
			BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JGE_K: imm = 0 -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			/* unsigned R1 >= 0 always holds */
			BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JGT_K: imm = 0xffffffff -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JLE_K: imm = 0xffffffff -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			/* no signed 32-bit value exceeds S32_MAX */
			BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP32_JSGE_K: imm = -0x80000000 -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP32_JSLT_K: imm = -0x80000000 -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	/* Register vs itself: reflexive conditions are always/never taken */
	{
		"JMP_JEQ_X: dst = src -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JGE_X: dst = src -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JGE, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JLE_X: dst = src -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JLE, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGE_X: dst = src -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSLE_X: dst = src -> always taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JNE_X: dst = src -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JNE, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JGT_X: dst = src -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JGT, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JLT_X: dst = src -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JLT, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JSGT_X: dst = src -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	{
		"JMP_JSLT_X: dst = src -> never taken",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 0 } },
	},
	/* Short relative jumps */
	/*
	 * Each case takes a forward jump of offset 0..4 over that many
	 * ADD-1 instructions; R0 must still be 0 at exit. The trailing
	 * MOV R0, -1 after EXIT is a canary that should be unreachable.
	 */
	{
		"Short relative jump: offset=0",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
		},
		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
		{ },
		{ { 0, 0 } },
	},
	{
		"Short relative jump: offset=1",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
		},
		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
		{ },
		{ { 0, 0 } },
	},
	{
		"Short relative jump: offset=2",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
		},
		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
		{ },
		{ { 0, 0 } },
	},
	{
		"Short relative jump: offset=3",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
		},
		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
		{ },
		{ { 0, 0 } },
	},
	{
		"Short relative jump: offset=4",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
		},
		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
		{ },
		{ { 0, 0 } },
	},
	/* Conditional branch conversions */
	/*
	 * Generated maximum-distance conditional jumps, covering both
	 * branches decided at runtime and branches whose outcome is already
	 * known when the program is JITed. Expected results (1 vs 2) come
	 * from the respective fill helpers.
	 */
	{
		"Long conditional jump: taken at runtime",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_max_jmp_taken,
	},
	{
		"Long conditional jump: not taken at runtime",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 2 } },
		.fill_helper = bpf_fill_max_jmp_not_taken,
	},
	{
		"Long conditional jump: always taken, known at JIT time",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 1 } },
		.fill_helper = bpf_fill_max_jmp_always_taken,
	},
	{
		"Long conditional jump: never taken, known at JIT time",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, 2 } },
		.fill_helper = bpf_fill_max_jmp_never_taken,
	},
	/* Staggered jump sequences, immediate */
	/*
	 * Generated chains of jumps with immediate operands; the expected
	 * result MAX_STAGGERED_JMP_SIZE + 1 indicates every jump in the
	 * staggered sequence was traversed. Run NR_STAGGERED_JMP_RUNS times.
	 */
	{
		"Staggered jumps: JMP_JA",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_ja,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JEQ_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jeq_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JNE_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jne_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JSET_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jset_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JGT_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jgt_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JGE_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jge_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JLT_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jlt_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JLE_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jle_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JSGT_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jsgt_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JSGE_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jsge_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JSLT_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jslt_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
	{
		"Staggered jumps: JMP_JSLE_K",
		{ },
		INTERNAL | FLAG_NO_DATA,
		{ },
		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
		.fill_helper = bpf_fill_staggered_jsle_imm,
		.nr_testruns = NR_STAGGERED_JMP_RUNS,
	},
  13413. /* Staggered jump sequences, register */
  13414. {
  13415. "Staggered jumps: JMP_JEQ_X",
  13416. { },
  13417. INTERNAL | FLAG_NO_DATA,
  13418. { },
  13419. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13420. .fill_helper = bpf_fill_staggered_jeq_reg,
  13421. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13422. },
  13423. {
  13424. "Staggered jumps: JMP_JNE_X",
  13425. { },
  13426. INTERNAL | FLAG_NO_DATA,
  13427. { },
  13428. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13429. .fill_helper = bpf_fill_staggered_jne_reg,
  13430. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13431. },
  13432. {
  13433. "Staggered jumps: JMP_JSET_X",
  13434. { },
  13435. INTERNAL | FLAG_NO_DATA,
  13436. { },
  13437. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13438. .fill_helper = bpf_fill_staggered_jset_reg,
  13439. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13440. },
  13441. {
  13442. "Staggered jumps: JMP_JGT_X",
  13443. { },
  13444. INTERNAL | FLAG_NO_DATA,
  13445. { },
  13446. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13447. .fill_helper = bpf_fill_staggered_jgt_reg,
  13448. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13449. },
  13450. {
  13451. "Staggered jumps: JMP_JGE_X",
  13452. { },
  13453. INTERNAL | FLAG_NO_DATA,
  13454. { },
  13455. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13456. .fill_helper = bpf_fill_staggered_jge_reg,
  13457. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13458. },
  13459. {
  13460. "Staggered jumps: JMP_JLT_X",
  13461. { },
  13462. INTERNAL | FLAG_NO_DATA,
  13463. { },
  13464. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13465. .fill_helper = bpf_fill_staggered_jlt_reg,
  13466. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13467. },
  13468. {
  13469. "Staggered jumps: JMP_JLE_X",
  13470. { },
  13471. INTERNAL | FLAG_NO_DATA,
  13472. { },
  13473. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13474. .fill_helper = bpf_fill_staggered_jle_reg,
  13475. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13476. },
  13477. {
  13478. "Staggered jumps: JMP_JSGT_X",
  13479. { },
  13480. INTERNAL | FLAG_NO_DATA,
  13481. { },
  13482. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13483. .fill_helper = bpf_fill_staggered_jsgt_reg,
  13484. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13485. },
  13486. {
  13487. "Staggered jumps: JMP_JSGE_X",
  13488. { },
  13489. INTERNAL | FLAG_NO_DATA,
  13490. { },
  13491. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13492. .fill_helper = bpf_fill_staggered_jsge_reg,
  13493. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13494. },
  13495. {
  13496. "Staggered jumps: JMP_JSLT_X",
  13497. { },
  13498. INTERNAL | FLAG_NO_DATA,
  13499. { },
  13500. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13501. .fill_helper = bpf_fill_staggered_jslt_reg,
  13502. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13503. },
  13504. {
  13505. "Staggered jumps: JMP_JSLE_X",
  13506. { },
  13507. INTERNAL | FLAG_NO_DATA,
  13508. { },
  13509. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13510. .fill_helper = bpf_fill_staggered_jsle_reg,
  13511. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13512. },
  13513. /* Staggered jump sequences, JMP32 immediate */
  13514. {
  13515. "Staggered jumps: JMP32_JEQ_K",
  13516. { },
  13517. INTERNAL | FLAG_NO_DATA,
  13518. { },
  13519. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13520. .fill_helper = bpf_fill_staggered_jeq32_imm,
  13521. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13522. },
  13523. {
  13524. "Staggered jumps: JMP32_JNE_K",
  13525. { },
  13526. INTERNAL | FLAG_NO_DATA,
  13527. { },
  13528. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13529. .fill_helper = bpf_fill_staggered_jne32_imm,
  13530. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13531. },
  13532. {
  13533. "Staggered jumps: JMP32_JSET_K",
  13534. { },
  13535. INTERNAL | FLAG_NO_DATA,
  13536. { },
  13537. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13538. .fill_helper = bpf_fill_staggered_jset32_imm,
  13539. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13540. },
  13541. {
  13542. "Staggered jumps: JMP32_JGT_K",
  13543. { },
  13544. INTERNAL | FLAG_NO_DATA,
  13545. { },
  13546. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13547. .fill_helper = bpf_fill_staggered_jgt32_imm,
  13548. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13549. },
  13550. {
  13551. "Staggered jumps: JMP32_JGE_K",
  13552. { },
  13553. INTERNAL | FLAG_NO_DATA,
  13554. { },
  13555. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13556. .fill_helper = bpf_fill_staggered_jge32_imm,
  13557. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13558. },
  13559. {
  13560. "Staggered jumps: JMP32_JLT_K",
  13561. { },
  13562. INTERNAL | FLAG_NO_DATA,
  13563. { },
  13564. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13565. .fill_helper = bpf_fill_staggered_jlt32_imm,
  13566. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13567. },
  13568. {
  13569. "Staggered jumps: JMP32_JLE_K",
  13570. { },
  13571. INTERNAL | FLAG_NO_DATA,
  13572. { },
  13573. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13574. .fill_helper = bpf_fill_staggered_jle32_imm,
  13575. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13576. },
  13577. {
  13578. "Staggered jumps: JMP32_JSGT_K",
  13579. { },
  13580. INTERNAL | FLAG_NO_DATA,
  13581. { },
  13582. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13583. .fill_helper = bpf_fill_staggered_jsgt32_imm,
  13584. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13585. },
  13586. {
  13587. "Staggered jumps: JMP32_JSGE_K",
  13588. { },
  13589. INTERNAL | FLAG_NO_DATA,
  13590. { },
  13591. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13592. .fill_helper = bpf_fill_staggered_jsge32_imm,
  13593. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13594. },
  13595. {
  13596. "Staggered jumps: JMP32_JSLT_K",
  13597. { },
  13598. INTERNAL | FLAG_NO_DATA,
  13599. { },
  13600. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13601. .fill_helper = bpf_fill_staggered_jslt32_imm,
  13602. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13603. },
  13604. {
  13605. "Staggered jumps: JMP32_JSLE_K",
  13606. { },
  13607. INTERNAL | FLAG_NO_DATA,
  13608. { },
  13609. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13610. .fill_helper = bpf_fill_staggered_jsle32_imm,
  13611. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13612. },
  13613. /* Staggered jump sequences, JMP32 register */
  13614. {
  13615. "Staggered jumps: JMP32_JEQ_X",
  13616. { },
  13617. INTERNAL | FLAG_NO_DATA,
  13618. { },
  13619. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13620. .fill_helper = bpf_fill_staggered_jeq32_reg,
  13621. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13622. },
  13623. {
  13624. "Staggered jumps: JMP32_JNE_X",
  13625. { },
  13626. INTERNAL | FLAG_NO_DATA,
  13627. { },
  13628. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13629. .fill_helper = bpf_fill_staggered_jne32_reg,
  13630. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13631. },
  13632. {
  13633. "Staggered jumps: JMP32_JSET_X",
  13634. { },
  13635. INTERNAL | FLAG_NO_DATA,
  13636. { },
  13637. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13638. .fill_helper = bpf_fill_staggered_jset32_reg,
  13639. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13640. },
  13641. {
  13642. "Staggered jumps: JMP32_JGT_X",
  13643. { },
  13644. INTERNAL | FLAG_NO_DATA,
  13645. { },
  13646. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13647. .fill_helper = bpf_fill_staggered_jgt32_reg,
  13648. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13649. },
  13650. {
  13651. "Staggered jumps: JMP32_JGE_X",
  13652. { },
  13653. INTERNAL | FLAG_NO_DATA,
  13654. { },
  13655. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13656. .fill_helper = bpf_fill_staggered_jge32_reg,
  13657. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13658. },
  13659. {
  13660. "Staggered jumps: JMP32_JLT_X",
  13661. { },
  13662. INTERNAL | FLAG_NO_DATA,
  13663. { },
  13664. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13665. .fill_helper = bpf_fill_staggered_jlt32_reg,
  13666. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13667. },
  13668. {
  13669. "Staggered jumps: JMP32_JLE_X",
  13670. { },
  13671. INTERNAL | FLAG_NO_DATA,
  13672. { },
  13673. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13674. .fill_helper = bpf_fill_staggered_jle32_reg,
  13675. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13676. },
  13677. {
  13678. "Staggered jumps: JMP32_JSGT_X",
  13679. { },
  13680. INTERNAL | FLAG_NO_DATA,
  13681. { },
  13682. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13683. .fill_helper = bpf_fill_staggered_jsgt32_reg,
  13684. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13685. },
  13686. {
  13687. "Staggered jumps: JMP32_JSGE_X",
  13688. { },
  13689. INTERNAL | FLAG_NO_DATA,
  13690. { },
  13691. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13692. .fill_helper = bpf_fill_staggered_jsge32_reg,
  13693. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13694. },
  13695. {
  13696. "Staggered jumps: JMP32_JSLT_X",
  13697. { },
  13698. INTERNAL | FLAG_NO_DATA,
  13699. { },
  13700. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13701. .fill_helper = bpf_fill_staggered_jslt32_reg,
  13702. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13703. },
  13704. {
  13705. "Staggered jumps: JMP32_JSLE_X",
  13706. { },
  13707. INTERNAL | FLAG_NO_DATA,
  13708. { },
  13709. { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13710. .fill_helper = bpf_fill_staggered_jsle32_reg,
  13711. .nr_testruns = NR_STAGGERED_JMP_RUNS,
  13712. },
  13713. };
/* Dummy net_device backing the skbs built by populate_skb(), so that
 * tests reading SKB_DEV_IFINDEX/SKB_DEV_TYPE have a device to look at.
 */
static struct net_device dev;
/*
 * Build a fake skb carrying @size bytes of @buf, with the metadata
 * fields (mark, hash, queue mapping, VLAN, device) set to the SKB_*
 * constants that the test programs assert against.
 *
 * Returns NULL when @size does not fit in MAX_DATA or the skb
 * allocation fails. The caller owns the skb and releases it with
 * kfree_skb() (via release_test_data()).
 */
static struct sk_buff *populate_skb(char *buf, int size)
{
	struct sk_buff *skb;

	if (size >= MAX_DATA)
		return NULL;

	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
	if (!skb)
		return NULL;

	__skb_put_data(skb, buf, size);

	/* Initialize a fake skb with test pattern. */
	skb_reset_mac_header(skb);
	skb->protocol = htons(ETH_P_IP);
	skb->pkt_type = SKB_TYPE;
	skb->mark = SKB_MARK;
	skb->hash = SKB_HASH;
	skb->queue_mapping = SKB_QUEUE_MAP;
	skb->vlan_tci = SKB_VLAN_TCI;
	skb->vlan_present = SKB_VLAN_PRESENT;
	skb->vlan_proto = htons(ETH_P_IP);
	dev_net_set(&dev, &init_net);
	skb->dev = &dev;
	skb->dev->ifindex = SKB_DEV_IFINDEX;
	skb->dev->type = SKB_DEV_TYPE;
	/* Network header follows the Ethernet header, or sits at the end
	 * of the data when it is shorter than ETH_HLEN.
	 */
	skb_set_network_header(skb, min(size, ETH_HLEN));

	return skb;
}
/*
 * Produce the runtime input for sub-test @sub of @test:
 *   - NULL for FLAG_NO_DATA tests,
 *   - a plain kmalloc() buffer for FLAG_LARGE_MEM tests,
 *   - otherwise a populated skb, optionally extended with one page
 *     fragment filled from test->frag_data when FLAG_SKB_FRAG is set.
 *
 * The result is released again by release_test_data(). Returns NULL
 * on allocation failure (indistinguishable from the FLAG_NO_DATA
 * case; callers must check the flag, see run_one()).
 */
static void *generate_test_data(struct bpf_test *test, int sub)
{
	struct sk_buff *skb;
	struct page *page;

	if (test->aux & FLAG_NO_DATA)
		return NULL;

	if (test->aux & FLAG_LARGE_MEM)
		return kmalloc(test->test[sub].data_size, GFP_KERNEL);

	/* Test case expects an skb, so populate one. Various
	 * subtests generate skbs of different sizes based on
	 * the same data.
	 */
	skb = populate_skb(test->data, test->test[sub].data_size);
	if (!skb)
		return NULL;

	if (test->aux & FLAG_SKB_FRAG) {
		/*
		 * when the test requires a fragmented skb, add a
		 * single fragment to the skb, filled with
		 * test->frag_data.
		 */
		void *ptr;

		page = alloc_page(GFP_KERNEL);
		if (!page)
			goto err_kfree_skb;

		ptr = kmap(page);
		if (!ptr)
			goto err_free_page;
		memcpy(ptr, test->frag_data, MAX_DATA);
		kunmap(page);
		/* The skb takes ownership of the page reference here */
		skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
	}

	return skb;

err_free_page:
	__free_page(page);
err_kfree_skb:
	kfree_skb(skb);
	return NULL;
}
  13780. static void release_test_data(const struct bpf_test *test, void *data)
  13781. {
  13782. if (test->aux & FLAG_NO_DATA)
  13783. return;
  13784. if (test->aux & FLAG_LARGE_MEM)
  13785. kfree(data);
  13786. else
  13787. kfree_skb(data);
  13788. }
  13789. static int filter_length(int which)
  13790. {
  13791. struct sock_filter *fp;
  13792. int len;
  13793. if (tests[which].fill_helper)
  13794. return tests[which].u.ptr.len;
  13795. fp = tests[which].u.insns;
  13796. for (len = MAX_INSNS - 1; len > 0; --len)
  13797. if (fp[len].code != 0 || fp[len].k != 0)
  13798. break;
  13799. return len + 1;
  13800. }
  13801. static void *filter_pointer(int which)
  13802. {
  13803. if (tests[which].fill_helper)
  13804. return tests[which].u.ptr.insns;
  13805. else
  13806. return tests[which].u.insns;
  13807. }
/*
 * Instantiate test case @which as a runnable program. CLASSIC filters
 * go through bpf_prog_create() (and so through the classic BPF
 * checker); INTERNAL (eBPF) programs are copied verbatim and handed
 * to bpf_prog_select_runtime() for JITing.
 *
 * Returns the program with *err == 0 on success. Returns NULL with
 * *err == 0 when a FLAG_EXPECTED_FAIL test was rejected as expected
 * (counted as a pass by the caller), or NULL with a negative *err on
 * real failure.
 */
static struct bpf_prog *generate_filter(int which, int *err)
{
	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
	unsigned int flen = filter_length(which);
	void *fptr = filter_pointer(which);
	struct sock_fprog_kern fprog;
	struct bpf_prog *fp;

	switch (test_type) {
	case CLASSIC:
		fprog.filter = fptr;
		fprog.len = flen;

		*err = bpf_prog_create(&fp, &fprog);
		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
			if (*err == tests[which].expected_errcode) {
				pr_cont("PASS\n");
				/* Verifier rejected filter as expected. */
				*err = 0;
				return NULL;
			} else {
				pr_cont("UNEXPECTED_PASS\n");
				/* Verifier didn't reject the test that's
				 * bad enough, just return!
				 */
				*err = -EINVAL;
				return NULL;
			}
		}
		if (*err) {
			pr_cont("FAIL to prog_create err=%d len=%d\n",
				*err, fprog.len);
			return NULL;
		}
		break;

	case INTERNAL:
		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
		if (fp == NULL) {
			pr_cont("UNEXPECTED_FAIL no memory left\n");
			*err = -ENOMEM;
			return NULL;
		}
		fp->len = flen;
		/* Type doesn't really matter here as long as it's not unspec. */
		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
		fp->aux->stack_depth = tests[which].stack_depth;
		fp->aux->verifier_zext = !!(tests[which].aux &
					    FLAG_VERIFIER_ZEXT);

		/* We cannot error here as we don't need type compatibility
		 * checks.
		 */
		fp = bpf_prog_select_runtime(fp, err);
		if (*err) {
			/* NOTE(review): fp is not freed on this path —
			 * presumably bpf_prog_select_runtime() disposes of
			 * the prog on error; verify against its
			 * implementation.
			 */
			pr_cont("FAIL to select_runtime err=%d\n", *err);
			return NULL;
		}
		break;
	}

	*err = 0;
	return fp;
}
  13868. static void release_filter(struct bpf_prog *fp, int which)
  13869. {
  13870. __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
  13871. switch (test_type) {
  13872. case CLASSIC:
  13873. bpf_prog_destroy(fp);
  13874. break;
  13875. case INTERNAL:
  13876. bpf_prog_free(fp);
  13877. break;
  13878. }
  13879. }
  13880. static int __run_one(const struct bpf_prog *fp, const void *data,
  13881. int runs, u64 *duration)
  13882. {
  13883. u64 start, finish;
  13884. int ret = 0, i;
  13885. migrate_disable();
  13886. start = ktime_get_ns();
  13887. for (i = 0; i < runs; i++)
  13888. ret = bpf_prog_run(fp, data);
  13889. finish = ktime_get_ns();
  13890. migrate_enable();
  13891. *duration = finish - start;
  13892. do_div(*duration, runs);
  13893. return ret;
  13894. }
  13895. static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
  13896. {
  13897. int err_cnt = 0, i, runs = MAX_TESTRUNS;
  13898. if (test->nr_testruns)
  13899. runs = min(test->nr_testruns, MAX_TESTRUNS);
  13900. for (i = 0; i < MAX_SUBTESTS; i++) {
  13901. void *data;
  13902. u64 duration;
  13903. u32 ret;
  13904. /*
  13905. * NOTE: Several sub-tests may be present, in which case
  13906. * a zero {data_size, result} tuple indicates the end of
  13907. * the sub-test array. The first test is always run,
  13908. * even if both data_size and result happen to be zero.
  13909. */
  13910. if (i > 0 &&
  13911. test->test[i].data_size == 0 &&
  13912. test->test[i].result == 0)
  13913. break;
  13914. data = generate_test_data(test, i);
  13915. if (!data && !(test->aux & FLAG_NO_DATA)) {
  13916. pr_cont("data generation failed ");
  13917. err_cnt++;
  13918. break;
  13919. }
  13920. ret = __run_one(fp, data, runs, &duration);
  13921. release_test_data(test, data);
  13922. if (ret == test->test[i].result) {
  13923. pr_cont("%lld ", duration);
  13924. } else {
  13925. pr_cont("ret %d != %d ", ret,
  13926. test->test[i].result);
  13927. err_cnt++;
  13928. }
  13929. }
  13930. return err_cnt;
  13931. }
/* Module parameters for selecting which tests to run: by name, by
 * single id, or by an inclusive [first, last] id range.
 */
static char test_name[64];
module_param_string(test_name, test_name, sizeof(test_name), 0);

static int test_id = -1;
module_param(test_id, int, 0);

static int test_range[2] = { 0, INT_MAX };
module_param_array(test_range, int, NULL, 0);
  13938. static bool exclude_test(int test_id)
  13939. {
  13940. return test_id < test_range[0] || test_id > test_range[1];
  13941. }
  13942. static __init struct sk_buff *build_test_skb(void)
  13943. {
  13944. u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
  13945. struct sk_buff *skb[2];
  13946. struct page *page[2];
  13947. int i, data_size = 8;
  13948. for (i = 0; i < 2; i++) {
  13949. page[i] = alloc_page(GFP_KERNEL);
  13950. if (!page[i]) {
  13951. if (i == 0)
  13952. goto err_page0;
  13953. else
  13954. goto err_page1;
  13955. }
  13956. /* this will set skb[i]->head_frag */
  13957. skb[i] = dev_alloc_skb(headroom + data_size);
  13958. if (!skb[i]) {
  13959. if (i == 0)
  13960. goto err_skb0;
  13961. else
  13962. goto err_skb1;
  13963. }
  13964. skb_reserve(skb[i], headroom);
  13965. skb_put(skb[i], data_size);
  13966. skb[i]->protocol = htons(ETH_P_IP);
  13967. skb_reset_network_header(skb[i]);
  13968. skb_set_mac_header(skb[i], -ETH_HLEN);
  13969. skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
  13970. // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
  13971. }
  13972. /* setup shinfo */
  13973. skb_shinfo(skb[0])->gso_size = 1448;
  13974. skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
  13975. skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
  13976. skb_shinfo(skb[0])->gso_segs = 0;
  13977. skb_shinfo(skb[0])->frag_list = skb[1];
  13978. skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
  13979. /* adjust skb[0]'s len */
  13980. skb[0]->len += skb[1]->len;
  13981. skb[0]->data_len += skb[1]->data_len;
  13982. skb[0]->truesize += skb[1]->truesize;
  13983. return skb[0];
  13984. err_skb1:
  13985. __free_page(page[1]);
  13986. err_page1:
  13987. kfree_skb(skb[0]);
  13988. err_skb0:
  13989. __free_page(page[0]);
  13990. err_page0:
  13991. return NULL;
  13992. }
/*
 * Build a GSO skb of two frag_list-linked skbs whose payload lives
 * entirely in kmalloc()ed linear data (head_frag == 0). Second input
 * for the skb_segment() tests. Returns NULL on allocation failure.
 */
static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
{
	unsigned int alloc_size = 2000;
	unsigned int headroom = 102, doffset = 72, data_size = 1308;
	struct sk_buff *skb[2];
	int i;

	/* skbs linked in a frag_list, both with linear data, with head_frag=0
	 * (data allocated by kmalloc), both have tcp data of 1308 bytes
	 * (total payload is 2616 bytes).
	 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
	 */
	for (i = 0; i < 2; i++) {
		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
		if (!skb[i]) {
			if (i == 0)
				goto err_skb0;
			else
				goto err_skb1;
		}

		skb[i]->protocol = htons(ETH_P_IPV6);
		skb_reserve(skb[i], headroom);
		skb_put(skb[i], doffset + data_size);
		skb_reset_network_header(skb[i]);
		if (i == 0)
			skb_reset_mac_header(skb[i]);
		else
			skb_set_mac_header(skb[i], -ETH_HLEN);
		/* Advance data past the ipv6+tcp headers */
		__skb_pull(skb[i], doffset);
	}

	/* setup shinfo.
	 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
	 * reduced gso_size.
	 */
	skb_shinfo(skb[0])->gso_size = 1288;
	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
	skb_shinfo(skb[0])->gso_segs = 0;
	skb_shinfo(skb[0])->frag_list = skb[1];

	/* adjust skb[0]'s len */
	skb[0]->len += skb[1]->len;
	skb[0]->data_len += skb[1]->len;
	skb[0]->truesize += skb[1]->truesize;

	return skb[0];

err_skb1:
	kfree_skb(skb[0]);
err_skb0:
	return NULL;
}
/* Descriptor for one skb_segment() test case */
struct skb_segment_test {
	const char *descr;		/* test name printed in the log */
	struct sk_buff *(*build_skb)(void); /* constructs the input skb */
	netdev_features_t features;	/* features passed to skb_segment() */
};
/* skb layout / feature combinations exercised by test_skb_segment() */
static struct skb_segment_test skb_segment_tests[] __initconst = {
	{
		.descr = "gso_with_rx_frags",
		.build_skb = build_test_skb,
		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
	},
	{
		.descr = "gso_linear_no_head_frag",
		.build_skb = build_test_skb_linear_no_head_frag,
		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
			    NETIF_F_LLTX | NETIF_F_GRO |
			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
			    NETIF_F_HW_VLAN_STAG_TX
	}
};
  14062. static __init int test_skb_segment_single(const struct skb_segment_test *test)
  14063. {
  14064. struct sk_buff *skb, *segs;
  14065. int ret = -1;
  14066. skb = test->build_skb();
  14067. if (!skb) {
  14068. pr_info("%s: failed to build_test_skb", __func__);
  14069. goto done;
  14070. }
  14071. segs = skb_segment(skb, test->features);
  14072. if (!IS_ERR(segs)) {
  14073. kfree_skb_list(segs);
  14074. ret = 0;
  14075. }
  14076. kfree_skb(skb);
  14077. done:
  14078. return ret;
  14079. }
  14080. static __init int test_skb_segment(void)
  14081. {
  14082. int i, err_cnt = 0, pass_cnt = 0;
  14083. for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
  14084. const struct skb_segment_test *test = &skb_segment_tests[i];
  14085. cond_resched();
  14086. if (exclude_test(i))
  14087. continue;
  14088. pr_info("#%d %s ", i, test->descr);
  14089. if (test_skb_segment_single(test)) {
  14090. pr_cont("FAIL\n");
  14091. err_cnt++;
  14092. } else {
  14093. pr_cont("PASS\n");
  14094. pass_cnt++;
  14095. }
  14096. }
  14097. pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
  14098. pass_cnt, err_cnt);
  14099. return err_cnt ? -EINVAL : 0;
  14100. }
/*
 * Main runner for the tests[] table: build each test (invoking its
 * fill helper when present), run it, and log per-test results plus a
 * final summary. Returns 0 when all tests pass, -EINVAL otherwise.
 */
static __init int test_bpf(void)
{
	int i, err_cnt = 0, pass_cnt = 0;
	int jit_cnt = 0, run_cnt = 0;

	for (i = 0; i < ARRAY_SIZE(tests); i++) {
		struct bpf_prog *fp;
		int err;

		cond_resched();
		if (exclude_test(i))
			continue;

		pr_info("#%d %s ", i, tests[i].descr);

		if (tests[i].fill_helper &&
		    tests[i].fill_helper(&tests[i]) < 0) {
			pr_cont("FAIL to prog_fill\n");
			continue;
		}

		fp = generate_filter(i, &err);

		/* The insns allocated by the fill helper have been copied
		 * into the prog by generate_filter(); free them now.
		 */
		if (tests[i].fill_helper) {
			kfree(tests[i].u.ptr.insns);
			tests[i].u.ptr.insns = NULL;
		}

		if (fp == NULL) {
			/* NULL + err == 0 means an expected rejection */
			if (err == 0) {
				pass_cnt++;
				continue;
			}
			err_cnt++;
			continue;
		}

		pr_cont("jited:%u ", fp->jited);

		run_cnt++;
		if (fp->jited)
			jit_cnt++;

		err = run_one(fp, &tests[i]);
		release_filter(fp, i);

		if (err) {
			pr_cont("FAIL (%d times)\n", err);
			err_cnt++;
		} else {
			pr_cont("PASS\n");
			pass_cnt++;
		}
	}

	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
		pass_cnt, err_cnt, jit_cnt, run_cnt);

	return err_cnt ? -EINVAL : 0;
}
/*
 * Descriptor for one tail call test program. The insns array is
 * zero-terminated; prepare_tail_call_tests() scans it (up to
 * MAX_INSNS) to determine the program length.
 */
struct tail_call_test {
	const char *descr;		/* test name printed in the log */
	struct bpf_insn insns[MAX_INSNS];
	int flags;			/* FLAG_NEED_STATE / FLAG_RESULT_IN_STATE */
	int result;			/* expected result value */
	int stack_depth;		/* reported via fp->aux->stack_depth */
};
/* Flags that can be passed to tail call test cases */
#define FLAG_NEED_STATE BIT(0)
#define FLAG_RESULT_IN_STATE BIT(1)

/*
 * Magic marker used in test snippets for tail calls below.
 * BPF_LD/MOV to R2 and R3 with this immediate value is replaced
 * with the proper values by the test runner.
 */
#define TAIL_CALL_MARKER 0x7a11ca11

/* Special offset to indicate a NULL call target */
#define TAIL_CALL_NULL 0x7fff

/* Special offset to indicate an out-of-range index */
#define TAIL_CALL_INVALID 0x7ffe

/*
 * Emit a tail call to the test program @offset entries away in the
 * table: load the prog array pointer into R2 and the (relative) index
 * into R3, then issue the tail call. Both marker immediates are
 * patched to real values by prepare_tail_call_tests().
 */
#define TAIL_CALL(offset)			       \
	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
		     offset, TAIL_CALL_MARKER),	       \
	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
/*
 * A test function to be called from a BPF program, clobbering a lot of
 * CPU registers in the process. A JITed BPF program calling this function
 * must save and restore any caller-saved registers it uses for internal
 * state, for example the current tail call count.
 */
BPF_CALL_1(bpf_test_func, u64, arg)
{
	char buf[64];

	/* Enough live locals to create register pressure */
	long a = 0;
	long b = 1;
	long c = 2;
	long d = 3;
	long e = 4;
	long f = 5;
	long g = 6;
	long h = 7;

	return snprintf(buf, sizeof(buf),
			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
			a, b, c, d, e, f, g, h, (int)arg);
}
/* Fake helper ID for bpf_test_func, one past the last real helper */
#define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
/*
 * Tail call tests. Each test case may call any other test in the table,
 * including itself, specified as a relative index offset from the calling
 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
 * results in a target index that is out of range.
 */
static struct tail_call_test tail_call_tests[] = {
	{
		"Tail call leaf",
		.insns = {
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_ALU64_IMM(BPF_ADD, R0, 1),
			BPF_EXIT_INSN(),
		},
		.result = 1,
	},
	{
		/* Adds 2 to R1, then tail calls the previous test */
		"Tail call 2",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 3,
	},
	{
		"Tail call 3",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 3),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 6,
	},
	{
		"Tail call 4",
		.insns = {
			BPF_ALU64_IMM(BPF_ADD, R1, 4),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 10,
	},
	{
		"Tail call load/store leaf",
		.insns = {
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
			BPF_STX_MEM(BPF_DW, R3, R1, -8),
			BPF_STX_MEM(BPF_DW, R3, R2, -16),
			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
			BPF_JMP_REG(BPF_JNE, R0, R1, 3),
			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
		},
		.result = 0,
		.stack_depth = 32,
	},
	{
		"Tail call load/store",
		.insns = {
			BPF_ALU64_IMM(BPF_MOV, R0, 3),
			BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
			TAIL_CALL(-1),
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_EXIT_INSN(),
		},
		.result = 0,
		.stack_depth = 16,
	},
	{
		/* Tail calls itself until the tail call limit stops it */
		"Tail call error path, max count reached",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(0),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
	},
	{
		"Tail call count preserved across function calls",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			BPF_STX_MEM(BPF_DW, R10, R1, -8),
			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
			BPF_CALL_REL(BPF_FUNC_jiffies64),
			BPF_CALL_REL(BPF_FUNC_test_func),
			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			TAIL_CALL(0),
			BPF_EXIT_INSN(),
		},
		.stack_depth = 8,
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
	},
	{
		"Tail call error path, NULL target",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(TAIL_CALL_NULL),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = MAX_TESTRUNS,
	},
	{
		"Tail call error path, index out of range",
		.insns = {
			BPF_LDX_MEM(BPF_W, R2, R1, 0),
			BPF_ALU64_IMM(BPF_ADD, R2, 1),
			BPF_STX_MEM(BPF_W, R1, R2, 0),
			TAIL_CALL(TAIL_CALL_INVALID),
			BPF_EXIT_INSN(),
		},
		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
		.result = MAX_TESTRUNS,
	},
};
  14331. static void __init destroy_tail_call_tests(struct bpf_array *progs)
  14332. {
  14333. int i;
  14334. for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
  14335. if (progs->ptrs[i])
  14336. bpf_prog_free(progs->ptrs[i]);
  14337. kfree(progs);
  14338. }
/*
 * Build one eBPF program per entry of tail_call_tests[] and store them
 * in a newly allocated bpf_array, patching TAIL_CALL_MARKER
 * placeholders and pseudo-call targets along the way. On success sets
 * *pprogs and returns 0; on failure tears down everything built so
 * far and returns a negative errno.
 */
static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
{
	int ntests = ARRAY_SIZE(tail_call_tests);
	struct bpf_array *progs;
	int which, err;

	/* Allocate the table of programs to be used for tall calls */
	progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
			GFP_KERNEL);
	if (!progs)
		goto out_nomem;

	/* Create all eBPF programs and populate the table */
	for (which = 0; which < ntests; which++) {
		struct tail_call_test *test = &tail_call_tests[which];
		struct bpf_prog *fp;
		int len, i;

		/* Compute the number of program instructions */
		for (len = 0; len < MAX_INSNS; len++) {
			struct bpf_insn *insn = &test->insns[len];

			/* BPF_LD_IMM64 occupies two instruction slots */
			if (len < MAX_INSNS - 1 &&
			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
				len++;
			if (insn->code == 0)
				break;
		}

		/* Allocate and initialize the program */
		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
		if (!fp)
			goto out_nomem;
		fp->len = len;
		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
		fp->aux->stack_depth = test->stack_depth;
		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));

		/* Relocate runtime tail call offsets and addresses */
		for (i = 0; i < len; i++) {
			struct bpf_insn *insn = &fp->insnsi[i];
			long addr = 0;

			switch (insn->code) {
			case BPF_LD | BPF_DW | BPF_IMM:
				/* Patch the prog array address into the two
				 * 32-bit halves of the insn pair.
				 */
				if (insn->imm != TAIL_CALL_MARKER)
					break;
				insn[0].imm = (u32)(long)progs;
				insn[1].imm = ((u64)(long)progs) >> 32;
				break;

			case BPF_ALU | BPF_MOV | BPF_K:
				/* Turn the relative/special offset into an
				 * absolute index in the prog array.
				 */
				if (insn->imm != TAIL_CALL_MARKER)
					break;
				if (insn->off == TAIL_CALL_NULL)
					insn->imm = ntests;	/* NULL slot */
				else if (insn->off == TAIL_CALL_INVALID)
					insn->imm = ntests + 1;	/* past end */
				else
					insn->imm = which + insn->off;
				insn->off = 0;
				break;

			case BPF_JMP | BPF_CALL:
				if (insn->src_reg != BPF_PSEUDO_CALL)
					break;
				switch (insn->imm) {
				case BPF_FUNC_get_numa_node_id:
					addr = (long)&numa_node_id;
					break;
				case BPF_FUNC_ktime_get_ns:
					addr = (long)&ktime_get_ns;
					break;
				case BPF_FUNC_ktime_get_boot_ns:
					addr = (long)&ktime_get_boot_fast_ns;
					break;
				case BPF_FUNC_ktime_get_coarse_ns:
					addr = (long)&ktime_get_coarse_ns;
					break;
				case BPF_FUNC_jiffies64:
					addr = (long)&get_jiffies_64;
					break;
				case BPF_FUNC_test_func:
					addr = (long)&bpf_test_func;
					break;
				default:
					err = -EFAULT;
					goto out_err;
				}
				*insn = BPF_EMIT_CALL(addr);
				/* Replace the call with a NOP when the
				 * target cannot be reached from
				 * __bpf_call_base with a 32-bit relative
				 * immediate.
				 */
				if ((long)__bpf_call_base + insn->imm != addr)
					*insn = BPF_JMP_A(0); /* Skip: NOP */
				break;
			}
		}

		fp = bpf_prog_select_runtime(fp, &err);
		if (err)
			goto out_err;

		progs->ptrs[which] = fp;
	}

	/* The last entry contains a NULL program pointer */
	progs->map.max_entries = ntests + 1;
	*pprogs = progs;
	return 0;

out_nomem:
	err = -ENOMEM;

out_err:
	if (progs)
		destroy_tail_call_tests(progs);
	return err;
}
  14441. static __init int test_tail_calls(struct bpf_array *progs)
  14442. {
  14443. int i, err_cnt = 0, pass_cnt = 0;
  14444. int jit_cnt = 0, run_cnt = 0;
  14445. for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
  14446. struct tail_call_test *test = &tail_call_tests[i];
  14447. struct bpf_prog *fp = progs->ptrs[i];
  14448. int *data = NULL;
  14449. int state = 0;
  14450. u64 duration;
  14451. int ret;
  14452. cond_resched();
  14453. if (exclude_test(i))
  14454. continue;
  14455. pr_info("#%d %s ", i, test->descr);
  14456. if (!fp) {
  14457. err_cnt++;
  14458. continue;
  14459. }
  14460. pr_cont("jited:%u ", fp->jited);
  14461. run_cnt++;
  14462. if (fp->jited)
  14463. jit_cnt++;
  14464. if (test->flags & FLAG_NEED_STATE)
  14465. data = &state;
  14466. ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
  14467. if (test->flags & FLAG_RESULT_IN_STATE)
  14468. ret = state;
  14469. if (ret == test->result) {
  14470. pr_cont("%lld PASS", duration);
  14471. pass_cnt++;
  14472. } else {
  14473. pr_cont("ret %d != %d FAIL", ret, test->result);
  14474. err_cnt++;
  14475. }
  14476. }
  14477. pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
  14478. __func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
  14479. return err_cnt ? -EINVAL : 0;
  14480. }
/*
 * Optional suite selector: "test_bpf", "test_tail_calls" or
 * "test_skb_segment".  An empty string selects all suites.
 */
static char test_suite[32];
module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
  14483. static __init int find_test_index(const char *test_name)
  14484. {
  14485. int i;
  14486. if (!strcmp(test_suite, "test_bpf")) {
  14487. for (i = 0; i < ARRAY_SIZE(tests); i++) {
  14488. if (!strcmp(tests[i].descr, test_name))
  14489. return i;
  14490. }
  14491. }
  14492. if (!strcmp(test_suite, "test_tail_calls")) {
  14493. for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
  14494. if (!strcmp(tail_call_tests[i].descr, test_name))
  14495. return i;
  14496. }
  14497. }
  14498. if (!strcmp(test_suite, "test_skb_segment")) {
  14499. for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
  14500. if (!strcmp(skb_segment_tests[i].descr, test_name))
  14501. return i;
  14502. }
  14503. }
  14504. return -1;
  14505. }
  14506. static __init int prepare_test_range(void)
  14507. {
  14508. int valid_range;
  14509. if (!strcmp(test_suite, "test_bpf"))
  14510. valid_range = ARRAY_SIZE(tests);
  14511. else if (!strcmp(test_suite, "test_tail_calls"))
  14512. valid_range = ARRAY_SIZE(tail_call_tests);
  14513. else if (!strcmp(test_suite, "test_skb_segment"))
  14514. valid_range = ARRAY_SIZE(skb_segment_tests);
  14515. else
  14516. return 0;
  14517. if (test_id >= 0) {
  14518. /*
  14519. * if a test_id was specified, use test_range to
  14520. * cover only that test.
  14521. */
  14522. if (test_id >= valid_range) {
  14523. pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
  14524. test_suite);
  14525. return -EINVAL;
  14526. }
  14527. test_range[0] = test_id;
  14528. test_range[1] = test_id;
  14529. } else if (*test_name) {
  14530. /*
  14531. * if a test_name was specified, find it and setup
  14532. * test_range to cover only that test.
  14533. */
  14534. int idx = find_test_index(test_name);
  14535. if (idx < 0) {
  14536. pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
  14537. test_name, test_suite);
  14538. return -EINVAL;
  14539. }
  14540. test_range[0] = idx;
  14541. test_range[1] = idx;
  14542. } else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
  14543. /*
  14544. * check that the supplied test_range is valid.
  14545. */
  14546. if (test_range[0] < 0 || test_range[1] >= valid_range) {
  14547. pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
  14548. test_suite);
  14549. return -EINVAL;
  14550. }
  14551. if (test_range[1] < test_range[0]) {
  14552. pr_err("test_bpf: test_range is ending before it starts.\n");
  14553. return -EINVAL;
  14554. }
  14555. }
  14556. return 0;
  14557. }
  14558. static int __init test_bpf_init(void)
  14559. {
  14560. struct bpf_array *progs = NULL;
  14561. int ret;
  14562. if (strlen(test_suite) &&
  14563. strcmp(test_suite, "test_bpf") &&
  14564. strcmp(test_suite, "test_tail_calls") &&
  14565. strcmp(test_suite, "test_skb_segment")) {
  14566. pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
  14567. return -EINVAL;
  14568. }
  14569. /*
  14570. * if test_suite is not specified, but test_id, test_name or test_range
  14571. * is specified, set 'test_bpf' as the default test suite.
  14572. */
  14573. if (!strlen(test_suite) &&
  14574. (test_id != -1 || strlen(test_name) ||
  14575. (test_range[0] != 0 || test_range[1] != INT_MAX))) {
  14576. pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
  14577. strscpy(test_suite, "test_bpf", sizeof(test_suite));
  14578. }
  14579. ret = prepare_test_range();
  14580. if (ret < 0)
  14581. return ret;
  14582. if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
  14583. ret = test_bpf();
  14584. if (ret)
  14585. return ret;
  14586. }
  14587. if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
  14588. ret = prepare_tail_call_tests(&progs);
  14589. if (ret)
  14590. return ret;
  14591. ret = test_tail_calls(progs);
  14592. destroy_tail_call_tests(progs);
  14593. if (ret)
  14594. return ret;
  14595. }
  14596. if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
  14597. return test_skb_segment();
  14598. return 0;
  14599. }
static void __exit test_bpf_exit(void)
{
	/* Nothing to tear down: test_bpf_init() frees all state it creates. */
}

module_init(test_bpf_init);
module_exit(test_bpf_exit);
MODULE_LICENSE("GPL");