430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366 |
- // SPDX-License-Identifier: GPL-2.0-only
- /*
- * Testsuite for BPF interpreter and BPF JIT compiler
- *
- * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
- */
- #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
- #include <linux/init.h>
- #include <linux/module.h>
- #include <linux/filter.h>
- #include <linux/bpf.h>
- #include <linux/skbuff.h>
- #include <linux/netdevice.h>
- #include <linux/if_vlan.h>
- #include <linux/random.h>
- #include <linux/highmem.h>
- #include <linux/sched.h>
- /* General test specific settings */
- #define MAX_SUBTESTS 3
- #define MAX_TESTRUNS 1000
- #define MAX_DATA 128
- #define MAX_INSNS 512
- #define MAX_K 0xffffFFFF
- /* Few constants used to init test 'skb' */
- #define SKB_TYPE 3
- #define SKB_MARK 0x1234aaaa
- #define SKB_HASH 0x1234aaab
- #define SKB_QUEUE_MAP 123
- #define SKB_VLAN_TCI 0xffff
- #define SKB_VLAN_PRESENT 1
- #define SKB_DEV_IFINDEX 577
- #define SKB_DEV_TYPE 588
- /* Redefine REGs to make tests less verbose */
- #define R0 BPF_REG_0
- #define R1 BPF_REG_1
- #define R2 BPF_REG_2
- #define R3 BPF_REG_3
- #define R4 BPF_REG_4
- #define R5 BPF_REG_5
- #define R6 BPF_REG_6
- #define R7 BPF_REG_7
- #define R8 BPF_REG_8
- #define R9 BPF_REG_9
- #define R10 BPF_REG_10
- /* Flags that can be passed to test cases */
- #define FLAG_NO_DATA BIT(0)
- #define FLAG_EXPECTED_FAIL BIT(1)
- #define FLAG_SKB_FRAG BIT(2)
- #define FLAG_VERIFIER_ZEXT BIT(3)
- #define FLAG_LARGE_MEM BIT(4)
- enum {
- CLASSIC = BIT(6), /* Old BPF instructions only. */
- INTERNAL = BIT(7), /* Extended instruction set. */
- };
- #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
- struct bpf_test {
- const char *descr;
- union {
- struct sock_filter insns[MAX_INSNS];
- struct bpf_insn insns_int[MAX_INSNS];
- struct {
- void *insns;
- unsigned int len;
- } ptr;
- } u;
- __u8 aux;
- __u8 data[MAX_DATA];
- struct {
- int data_size;
- __u32 result;
- } test[MAX_SUBTESTS];
- int (*fill_helper)(struct bpf_test *self);
- int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
- __u8 frag_data[MAX_DATA];
- int stack_depth; /* for eBPF only, since tests don't call verifier */
- int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
- };
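For orientation, the struct above is what each entry in the test table that follows these fill helpers populates. The snippet below is only an illustrative sketch, assuming the table's usual designated-initializer style; the description string and values are made up:

/* Hypothetical classic-BPF entry: return the constant 3, no packet data. */
{
	"RET_K example (illustrative)",
	.u.insns = {
		BPF_STMT(BPF_RET | BPF_K, 3),
	},
	CLASSIC | FLAG_NO_DATA,
	{ },			/* data[] unused */
	{ { 0, 3 } },		/* data_size 0, expected result 3 */
},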
- /* Large test cases need separate allocation and fill handler. */
- static int bpf_fill_maxinsns1(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- __u32 k = ~0;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len; i++, k--)
- insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns2(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len; i++)
- insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns3(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- struct rnd_state rnd;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- prandom_seed_state(&rnd, 3141592653589793238ULL);
- for (i = 0; i < len - 1; i++) {
- __u32 k = prandom_u32_state(&rnd);
- insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
- }
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns4(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS + 1;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len; i++)
- insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns5(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
- for (i = 1; i < len - 1; i++)
- insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns6(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len - 1; i++)
- insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
- SKF_AD_VLAN_TAG_PRESENT);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns7(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len - 4; i++)
- insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
- SKF_AD_CPU);
- insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
- insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
- SKF_AD_CPU);
- insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns8(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i, jmp_off = len - 3;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
- for (i = 1; i < len - 1; i++)
- insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns9(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct bpf_insn *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
- insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
- insn[2] = BPF_EXIT_INSN();
- for (i = 3; i < len - 2; i++)
- insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
- insn[len - 2] = BPF_EXIT_INSN();
- insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns10(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS, hlen = len - 2;
- struct bpf_insn *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < hlen / 2; i++)
- insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
- for (i = hlen - 1; i > hlen / 2; i--)
- insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
- insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
- insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
- insn[hlen + 1] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
- unsigned int plen)
- {
- struct sock_filter *insn;
- unsigned int rlen;
- int i, j;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- rlen = (len % plen) - 1;
- for (i = 0; i + plen < len; i += plen)
- for (j = 0; j < plen; j++)
- insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
- plen - 1 - j, 0, 0);
- for (j = 0; j < rlen; j++)
- insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
- 0, 0);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns11(struct bpf_test *self)
- {
- /* Hits 70 passes on x86_64 and triggers NOPs padding. */
- return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
- }
- static int bpf_fill_maxinsns12(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
- for (i = 1; i < len - 1; i++)
- insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_maxinsns13(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len - 3; i++)
- insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
- insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
- insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int bpf_fill_ja(struct bpf_test *self)
- {
- /* Hits exactly 11 passes on x86_64 JIT. */
- return __bpf_fill_ja(self, 12, 9);
- }
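As a concrete trace of __bpf_fill_ja() above, bpf_fill_ja() asks for len = 12 and plen = 9, which yields the following layout (offsets come from plen - 1 - j and rlen - 1 - j; shown only as an illustration):

/*
 *   insn[0]  = JA +8  --.  first block of plen = 9 jumps
 *   insn[1]  = JA +7    |  (offsets 8 .. 0)
 *   ...                 |
 *   insn[8]  = JA +0    |
 *   insn[9]  = JA +1  <-'  remainder block, rlen = (12 % 9) - 1 = 2
 *   insn[10] = JA +0
 *   insn[11] = RET 0xababcbac
 *
 * Execution enters at insn[0], hops to insn[9] (pc + 1 + 8), then to
 * insn[11], and returns 0xababcbac.
 */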
- static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
- {
- unsigned int len = BPF_MAXINSNS;
- struct sock_filter *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- for (i = 0; i < len - 1; i += 2) {
- insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
- insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_CPU);
- }
- insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- return 0;
- }
- static int __bpf_fill_stxdw(struct bpf_test *self, int size)
- {
- unsigned int len = BPF_MAXINSNS;
- struct bpf_insn *insn;
- int i;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
- insn[1] = BPF_ST_MEM(size, R10, -40, 42);
- for (i = 2; i < len - 2; i++)
- insn[i] = BPF_STX_XADD(size, R10, R0, -40);
- insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
- insn[len - 1] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- self->stack_depth = 40;
- return 0;
- }
- static int bpf_fill_stxw(struct bpf_test *self)
- {
- return __bpf_fill_stxdw(self, BPF_W);
- }
- static int bpf_fill_stxdw(struct bpf_test *self)
- {
- return __bpf_fill_stxdw(self, BPF_DW);
- }
- static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
- {
- struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
- memcpy(insns, tmp, sizeof(tmp));
- return 2;
- }
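BPF_LD_IMM64() is a two-slot pseudo instruction (the 64-bit immediate is split across two struct bpf_insn entries), which is why the helper above returns 2. A minimal usage sketch, illustrative only:

/* Load a 64-bit constant into R1 and keep the insn index in sync. */
struct bpf_insn buf[2];
int i = 0;

i += __bpf_ld_imm64(buf, R1, 0x0123456789abcdefULL);	/* i is now 2 */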
- /*
- * Branch conversion tests. Complex operations can expand to a lot
- * of instructions when JITed. This in turn may cause jump offsets
- * to overflow the field size of the native instruction, triggering
- * a branch conversion mechanism in some JITs.
- */
- static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
- {
- struct bpf_insn *insns;
- int len = S16_MAX + 5;
- int i;
- insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
- if (!insns)
- return -ENOMEM;
- i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
- insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
- insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
- insns[i++] = BPF_EXIT_INSN();
- while (i < len - 1) {
- static const int ops[] = {
- BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
- BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
- };
- int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
- if (i & 1)
- insns[i++] = BPF_ALU32_REG(op, R0, R1);
- else
- insns[i++] = BPF_ALU64_REG(op, R0, R1);
- }
- insns[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insns;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
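To make the layout concrete, here is a rough sketch of what __bpf_fill_max_jmp() emits, with len = S16_MAX + 5 (illustrative only):

/*
 *   [0..1]     LD_IMM64 R1, 0x0123456789abcdef
 *   [2]        MOV64    R0, 1
 *   [3]        JMP <op> R0, imm, +S16_MAX     ; maximum 16-bit offset
 *   [4]        MOV64    R0, 2
 *   [5]        EXIT
 *   [6..len-2] ~32K ALU/ALU64 filler ops on R0, R1
 *   [len-1]    EXIT                            ; branch target
 *
 * If the JIT expands any filler op into several native instructions,
 * the native branch distance grows beyond a short conditional-branch
 * encoding and the JIT has to take its far-branch conversion path.
 */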
- /* Branch taken by runtime decision */
- static int bpf_fill_max_jmp_taken(struct bpf_test *self)
- {
- return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
- }
- /* Branch not taken by runtime decision */
- static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
- {
- return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
- }
- /* Branch always taken, known at JIT time */
- static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
- {
- return __bpf_fill_max_jmp(self, BPF_JGE, 0);
- }
- /* Branch never taken, known at JIT time */
- static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
- {
- return __bpf_fill_max_jmp(self, BPF_JLT, 0);
- }
- /* ALU result computation used in tests */
- static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
- {
- *res = 0;
- switch (op) {
- case BPF_MOV:
- *res = v2;
- break;
- case BPF_AND:
- *res = v1 & v2;
- break;
- case BPF_OR:
- *res = v1 | v2;
- break;
- case BPF_XOR:
- *res = v1 ^ v2;
- break;
- case BPF_LSH:
- *res = v1 << v2;
- break;
- case BPF_RSH:
- *res = v1 >> v2;
- break;
- case BPF_ARSH:
- *res = v1 >> v2;
- if (v2 > 0 && v1 > S64_MAX)
- *res |= ~0ULL << (64 - v2);
- break;
- case BPF_ADD:
- *res = v1 + v2;
- break;
- case BPF_SUB:
- *res = v1 - v2;
- break;
- case BPF_MUL:
- *res = v1 * v2;
- break;
- case BPF_DIV:
- if (v2 == 0)
- return false;
- *res = div64_u64(v1, v2);
- break;
- case BPF_MOD:
- if (v2 == 0)
- return false;
- div64_u64_rem(v1, v2, res);
- break;
- }
- return true;
- }
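The BPF_ARSH case above emulates an arithmetic (sign-extending) right shift using unsigned u64 arithmetic: the logical shift is patched by OR-ing ones into the top v2 bits whenever the input had its sign bit set (v1 > S64_MAX). A worked example, purely illustrative:

/* v1 = 0xfedcba9876543210 (negative as s64), v2 = 8:
 *
 *   logical shift:  v1 >> 8             = 0x00fedcba98765432
 *   sign patch:     |= ~0ULL << (64-8)  = 0xff00000000000000
 *   result:                               0xfffedcba98765432
 *
 * which equals (u64)(((s64)v1) >> 8), i.e. the arithmetic shift.
 */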
- /* Test an ALU shift operation for all valid shift values */
- static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
- u8 mode, bool alu32)
- {
- static const s64 regs[] = {
- 0x0123456789abcdefLL, /* dword > 0, word < 0 */
- 0xfedcba9876543210LL, /* dword < 0, word > 0 */
- 0xfedcba0198765432LL, /* dword < 0, word < 0 */
- 0x0123458967abcdefLL, /* dword > 0, word > 0 */
- };
- int bits = alu32 ? 32 : 64;
- int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
- struct bpf_insn *insn;
- int imm, k;
- int i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- for (k = 0; k < ARRAY_SIZE(regs); k++) {
- s64 reg = regs[k];
- i += __bpf_ld_imm64(&insn[i], R3, reg);
- for (imm = 0; imm < bits; imm++) {
- u64 val;
- /* Perform operation */
- insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
- if (alu32) {
- if (mode == BPF_K)
- insn[i++] = BPF_ALU32_IMM(op, R1, imm);
- else
- insn[i++] = BPF_ALU32_REG(op, R1, R2);
- if (op == BPF_ARSH)
- reg = (s32)reg;
- else
- reg = (u32)reg;
- __bpf_alu_result(&val, reg, imm, op);
- val = (u32)val;
- } else {
- if (mode == BPF_K)
- insn[i++] = BPF_ALU64_IMM(op, R1, imm);
- else
- insn[i++] = BPF_ALU64_REG(op, R1, R2);
- __bpf_alu_result(&val, reg, imm, op);
- }
- /*
- * When debugging a JIT that fails this test, one
- * can write the immediate value to R0 here to find
- * out which operand values fail.
- */
- /* Load reference and check the result */
- i += __bpf_ld_imm64(&insn[i], R4, val);
- insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
- insn[i++] = BPF_EXIT_INSN();
- }
- }
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insn[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
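The length formula (2 + 7 * bits) * ARRAY_SIZE(regs) + 3 matches the emitted instruction count exactly, which is what allows the strict BUG_ON(i != len) above. A rough accounting, as a sanity check only:

/* Per register value:        2 insns (__bpf_ld_imm64 of R3)
 * Per shift value (x bits):  MOV R1,R3 + MOV R2,imm + ALU op
 *                            + 2-insn load of R4 + JEQ + EXIT = 7 insns
 * Head/tail:                 MOV R0,0  and  MOV R0,1 + EXIT   = 3 insns
 *
 * len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3
 *     = (2 + 7 * 64) * 4 + 3 = 1803 insns for the 64-bit variants.
 */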
- static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
- }
- static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
- }
- static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
- }
- static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
- }
- static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
- }
- static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
- }
- static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
- }
- static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
- }
- static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
- }
- static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
- }
- static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
- }
- static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
- }
- /*
- * Test an ALU register shift operation for all valid shift values
- * for the case when the source and destination are the same.
- */
- static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
- bool alu32)
- {
- int bits = alu32 ? 32 : 64;
- int len = 3 + 6 * bits;
- struct bpf_insn *insn;
- int i = 0;
- u64 val;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- for (val = 0; val < bits; val++) {
- u64 res;
- /* Perform operation */
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
- if (alu32)
- insn[i++] = BPF_ALU32_REG(op, R1, R1);
- else
- insn[i++] = BPF_ALU64_REG(op, R1, R1);
- /* Compute the reference result */
- __bpf_alu_result(&res, val, val, op);
- if (alu32)
- res = (u32)res;
- i += __bpf_ld_imm64(&insn[i], R2, res);
- /* Check the actual result */
- insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
- insn[i++] = BPF_EXIT_INSN();
- }
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insn[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
- static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
- }
- static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
- }
- static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
- }
- static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
- }
- static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
- }
- static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
- }
- /*
- * Common operand pattern generator for exhaustive power-of-two magnitudes
- * tests. The block size parameters can be adjusted to increase/reduce the
- * number of combinations tested and thereby execution speed and memory
- * footprint.
- */
- static inline s64 value(int msb, int delta, int sign)
- {
- return sign * (1LL << msb) + delta;
- }
- static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
- int dbits, int sbits, int block1, int block2,
- int (*emit)(struct bpf_test*, void*,
- struct bpf_insn*, s64, s64))
- {
- static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
- struct bpf_insn *insns;
- int di, si, bt, db, sb;
- int count, len, k;
- int extra = 1 + 2;
- int i = 0;
- /* Total number of iterations for the two patterns */
- count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
- count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
- /* Compute the maximum number of insns and allocate the buffer */
- len = extra + count * (*emit)(self, arg, NULL, 0, 0);
- insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
- if (!insns)
- return -ENOMEM;
- /* Add head instruction(s) */
- insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- /*
- * Pattern 1: all combinations of power-of-two magnitudes and sign,
- * and with a block of contiguous values around each magnitude.
- */
- for (di = 0; di < dbits - 1; di++) /* Dst magnitudes */
- for (si = 0; si < sbits - 1; si++) /* Src magnitudes */
- for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
- for (db = -(block1 / 2);
- db < (block1 + 1) / 2; db++)
- for (sb = -(block1 / 2);
- sb < (block1 + 1) / 2; sb++) {
- s64 dst, src;
- dst = value(di, db, sgn[k][0]);
- src = value(si, sb, sgn[k][1]);
- i += (*emit)(self, arg,
- &insns[i],
- dst, src);
- }
- /*
- * Pattern 2: all combinations for a larger block of values
- * for each power-of-two magnitude and sign, where the magnitude is
- * the same for both operands.
- */
- for (bt = 0; bt < max(dbits, sbits) - 1; bt++) /* Magnitude */
- for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
- for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
- for (sb = -(block2 / 2);
- sb < (block2 + 1) / 2; sb++) {
- s64 dst, src;
- dst = value(bt % dbits, db, sgn[k][0]);
- src = value(bt % sbits, sb, sgn[k][1]);
- i += (*emit)(self, arg, &insns[i],
- dst, src);
- }
- /* Append tail instructions */
- insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insns[i++] = BPF_EXIT_INSN();
- BUG_ON(i > len);
- self->u.ptr.insns = insns;
- self->u.ptr.len = i;
- return 0;
- }
- /*
- * Block size parameters used in pattern tests below. Tune as needed to
- * increase/reduce the number of combinations tested, see following examples.
- * block values per operand MSB
- * ----------------------------------------
- * 0 none
- * 1 (1 << MSB)
- * 2 (1 << MSB) + [-1, 0]
- * 3 (1 << MSB) + [-1, 0, 1]
- */
- #define PATTERN_BLOCK1 1
- #define PATTERN_BLOCK2 5
- /* Number of test runs for a pattern test */
- #define NR_PATTERN_RUNS 1
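With these defaults, the operand counts from __bpf_fill_pattern() stay manageable. For the ALU64 immediate tests below (dbits = 64, sbits = 32), the arithmetic works out as follows (my own tally from the formulas above, shown only as a sanity check):

/* Pattern 1: (64-1) * (32-1) * 1 * 1 * 4 sign combos = 7812 operand pairs
 * Pattern 2: (max(64,32)-1) * 5 * 5 * 4              = 6300 operand pairs
 * Total                                              = 14112 pairs
 *
 * With at most 7 insns per pair (__bpf_emit_alu64_imm), the resulting
 * program is bounded by 3 + 14112 * 7 = 98787 instructions.
 */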
- /*
- * Exhaustive tests of ALU operations for all combinations of power-of-two
- * magnitudes of the operands, both for positive and negative values. The
- * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
- * emit different code depending on the magnitude of the immediate value.
- */
- static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 imm)
- {
- int op = *(int *)arg;
- int i = 0;
- u64 res;
- if (!insns)
- return 7;
- if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R3, res);
- insns[i++] = BPF_ALU64_IMM(op, R1, imm);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- }
- return i;
- }
- static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 imm)
- {
- int op = *(int *)arg;
- int i = 0;
- u64 res;
- if (!insns)
- return 7;
- if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
- insns[i++] = BPF_ALU32_IMM(op, R1, imm);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- }
- return i;
- }
- static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- int i = 0;
- u64 res;
- if (!insns)
- return 9;
- if (__bpf_alu_result(&res, dst, src, op)) {
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- i += __bpf_ld_imm64(&insns[i], R3, res);
- insns[i++] = BPF_ALU64_REG(op, R1, R2);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- }
- return i;
- }
- static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- int i = 0;
- u64 res;
- if (!insns)
- return 9;
- if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
- insns[i++] = BPF_ALU32_REG(op, R1, R2);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- }
- return i;
- }
- static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 32,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_alu64_imm);
- }
- static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 32,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_alu32_imm);
- }
- static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_alu64_reg);
- }
- static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_alu32_reg);
- }
- /* ALU64 immediate operations */
- static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_MOV);
- }
- static int bpf_fill_alu64_and_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_AND);
- }
- static int bpf_fill_alu64_or_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_OR);
- }
- static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_XOR);
- }
- static int bpf_fill_alu64_add_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_ADD);
- }
- static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_SUB);
- }
- static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_MUL);
- }
- static int bpf_fill_alu64_div_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_DIV);
- }
- static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu64_imm(self, BPF_MOD);
- }
- /* ALU32 immediate operations */
- static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_MOV);
- }
- static int bpf_fill_alu32_and_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_AND);
- }
- static int bpf_fill_alu32_or_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_OR);
- }
- static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_XOR);
- }
- static int bpf_fill_alu32_add_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_ADD);
- }
- static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_SUB);
- }
- static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_MUL);
- }
- static int bpf_fill_alu32_div_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_DIV);
- }
- static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
- {
- return __bpf_fill_alu32_imm(self, BPF_MOD);
- }
- /* ALU64 register operations */
- static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_MOV);
- }
- static int bpf_fill_alu64_and_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_AND);
- }
- static int bpf_fill_alu64_or_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_OR);
- }
- static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_XOR);
- }
- static int bpf_fill_alu64_add_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_ADD);
- }
- static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_SUB);
- }
- static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_MUL);
- }
- static int bpf_fill_alu64_div_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_DIV);
- }
- static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu64_reg(self, BPF_MOD);
- }
- /* ALU32 register operations */
- static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_MOV);
- }
- static int bpf_fill_alu32_and_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_AND);
- }
- static int bpf_fill_alu32_or_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_OR);
- }
- static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_XOR);
- }
- static int bpf_fill_alu32_add_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_ADD);
- }
- static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_SUB);
- }
- static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_MUL);
- }
- static int bpf_fill_alu32_div_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_DIV);
- }
- static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
- {
- return __bpf_fill_alu32_reg(self, BPF_MOD);
- }
- /*
- * Test JITs that implement complex ALU operations as function
- * calls, and must re-arrange operands for argument passing.
- */
- static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
- {
- int len = 2 + 10 * 10;
- struct bpf_insn *insns;
- u64 dst, res;
- int i = 0;
- u32 imm;
- int rd;
- insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
- if (!insns)
- return -ENOMEM;
- /* Operand and result values according to operation */
- if (alu32)
- dst = 0x76543210U;
- else
- dst = 0x7edcba9876543210ULL;
- imm = 0x01234567U;
- if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
- imm &= 31;
- __bpf_alu_result(&res, dst, imm, op);
- if (alu32)
- res = (u32)res;
- /* Check all operand registers */
- for (rd = R0; rd <= R9; rd++) {
- i += __bpf_ld_imm64(&insns[i], rd, dst);
- if (alu32)
- insns[i++] = BPF_ALU32_IMM(op, rd, imm);
- else
- insns[i++] = BPF_ALU64_IMM(op, rd, imm);
- insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
- insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- }
- insns[i++] = BPF_MOV64_IMM(R0, 1);
- insns[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insns;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
- /* ALU64 K registers */
- static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
- }
- static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
- }
- static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
- }
- static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
- }
- static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
- }
- static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
- }
- static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
- }
- static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
- }
- static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
- }
- static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
- }
- static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
- }
- static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
- }
- /* ALU32 K registers */
- static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
- }
- static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
- }
- static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
- }
- static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
- }
- static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
- }
- static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
- }
- static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
- }
- static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
- }
- static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
- }
- static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
- }
- static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
- }
- static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
- {
- return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
- }
- /*
- * Test JITs that implement complex ALU operations as function
- * calls, and must re-arrange operands for argument passing.
- */
- static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
- {
- int len = 2 + 10 * 10 * 12;
- u64 dst, src, res, same;
- struct bpf_insn *insns;
- int rd, rs;
- int i = 0;
- insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
- if (!insns)
- return -ENOMEM;
- /* Operand and result values according to operation */
- if (alu32) {
- dst = 0x76543210U;
- src = 0x01234567U;
- } else {
- dst = 0x7edcba9876543210ULL;
- src = 0x0123456789abcdefULL;
- }
- if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
- src &= 31;
- __bpf_alu_result(&res, dst, src, op);
- __bpf_alu_result(&same, src, src, op);
- if (alu32) {
- res = (u32)res;
- same = (u32)same;
- }
- /* Check all combinations of operand registers */
- for (rd = R0; rd <= R9; rd++) {
- for (rs = R0; rs <= R9; rs++) {
- u64 val = rd == rs ? same : res;
- i += __bpf_ld_imm64(&insns[i], rd, dst);
- i += __bpf_ld_imm64(&insns[i], rs, src);
- if (alu32)
- insns[i++] = BPF_ALU32_REG(op, rd, rs);
- else
- insns[i++] = BPF_ALU64_REG(op, rd, rs);
- insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
- insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- }
- }
- insns[i++] = BPF_MOV64_IMM(R0, 1);
- insns[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insns;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
- /* ALU64 X register combinations */
- static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
- }
- static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
- }
- static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
- }
- static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
- }
- static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
- }
- static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
- }
- static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
- }
- static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
- }
- static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
- }
- static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
- }
- static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
- }
- static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
- }
- /* ALU32 X register combinations */
- static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
- }
- static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
- }
- static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
- }
- static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
- }
- static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
- }
- static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
- }
- static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
- }
- static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
- }
- static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
- }
- static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
- }
- static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
- }
- static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
- }
- /*
- * Exhaustive tests of atomic operations for all power-of-two operand
- * magnitudes, both for positive and negative values.
- */
- static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- u64 keep, fetch, res;
- int i = 0;
- if (!insns)
- return 21;
- switch (op) {
- case BPF_XCHG:
- res = src;
- break;
- default:
- __bpf_alu_result(&res, dst, src, BPF_OP(op));
- }
- keep = 0x0123456789abcdefULL;
- if (op & BPF_FETCH)
- fetch = dst;
- else
- fetch = src;
- i += __bpf_ld_imm64(&insns[i], R0, keep);
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- i += __bpf_ld_imm64(&insns[i], R3, res);
- i += __bpf_ld_imm64(&insns[i], R4, fetch);
- i += __bpf_ld_imm64(&insns[i], R5, keep);
- insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
- insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
- insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
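To make the three checks in __bpf_emit_atomic64() concrete, consider BPF_ADD | BPF_FETCH with dst = 5 and src = 3 (values chosen only for illustration):

/*
 *   [R10-8] before:  5          (dst stored by BPF_STX_MEM)
 *   [R10-8] after :  8 == res   (checked via R1 vs R3)
 *   R2 after op   :  5 == fetch (old memory value, because BPF_FETCH)
 *   R0            :  keep       (must be untouched, checked vs R5)
 *
 * Without BPF_FETCH, fetch == src, i.e. the source register must simply
 * be left unchanged by the operation.
 */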
- static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- u64 keep, fetch, res;
- int i = 0;
- if (!insns)
- return 21;
- switch (op) {
- case BPF_XCHG:
- res = src;
- break;
- default:
- __bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
- }
- keep = 0x0123456789abcdefULL;
- if (op & BPF_FETCH)
- fetch = (u32)dst;
- else
- fetch = src;
- i += __bpf_ld_imm64(&insns[i], R0, keep);
- i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
- i += __bpf_ld_imm64(&insns[i], R4, fetch);
- i += __bpf_ld_imm64(&insns[i], R5, keep);
- insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
- insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
- insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int i = 0;
- if (!insns)
- return 23;
- i += __bpf_ld_imm64(&insns[i], R0, ~dst);
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- /* Result unsuccessful */
- insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
- insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
- insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- /* Result successful */
- insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
- insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
- insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int i = 0;
- if (!insns)
- return 27;
- i += __bpf_ld_imm64(&insns[i], R0, ~dst);
- i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- /* Result unsuccessful */
- insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
- insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
- insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
- insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
- insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
- insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
- insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- /* Result successful */
- i += __bpf_ld_imm64(&insns[i], R0, dst);
- insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
- insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
- insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
- insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
- insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
- insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- static int __bpf_fill_atomic64(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- 0, PATTERN_BLOCK2,
- &__bpf_emit_atomic64);
- }
- static int __bpf_fill_atomic32(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- 0, PATTERN_BLOCK2,
- &__bpf_emit_atomic32);
- }
- /* 64-bit atomic operations */
- static int bpf_fill_atomic64_add(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_ADD);
- }
- static int bpf_fill_atomic64_and(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_AND);
- }
- static int bpf_fill_atomic64_or(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_OR);
- }
- static int bpf_fill_atomic64_xor(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_XOR);
- }
- static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
- }
- static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
- }
- static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
- }
- static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
- }
- static int bpf_fill_atomic64_xchg(struct bpf_test *self)
- {
- return __bpf_fill_atomic64(self, BPF_XCHG);
- }
- static int bpf_fill_cmpxchg64(struct bpf_test *self)
- {
- return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
- &__bpf_emit_cmpxchg64);
- }
- /* 32-bit atomic operations */
- static int bpf_fill_atomic32_add(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_ADD);
- }
- static int bpf_fill_atomic32_and(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_AND);
- }
- static int bpf_fill_atomic32_or(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_OR);
- }
- static int bpf_fill_atomic32_xor(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_XOR);
- }
- static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
- }
- static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
- }
- static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
- }
- static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
- }
- static int bpf_fill_atomic32_xchg(struct bpf_test *self)
- {
- return __bpf_fill_atomic32(self, BPF_XCHG);
- }
- static int bpf_fill_cmpxchg32(struct bpf_test *self)
- {
- return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
- &__bpf_emit_cmpxchg32);
- }
- /*
- * Test JITs that implement ATOMIC operations as function calls or
- * other primitives, and must re-arrange operands for argument passing.
- */
- static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
- {
- struct bpf_insn *insn;
- int len = 2 + 34 * 10 * 10;
- u64 mem, upd, res;
- int rd, rs, i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- /* Operand and memory values */
- if (width == BPF_DW) {
- mem = 0x0123456789abcdefULL;
- upd = 0xfedcba9876543210ULL;
- } else { /* BPF_W */
- mem = 0x01234567U;
- upd = 0x76543210U;
- }
- /* Memory updated according to operation */
- switch (op) {
- case BPF_XCHG:
- res = upd;
- break;
- case BPF_CMPXCHG:
- res = mem;
- break;
- default:
- __bpf_alu_result(&res, mem, upd, BPF_OP(op));
- }
- /* Test all operand registers */
- for (rd = R0; rd <= R9; rd++) {
- for (rs = R0; rs <= R9; rs++) {
- u64 cmp, src;
- /* Initialize value in memory */
- i += __bpf_ld_imm64(&insn[i], R0, mem);
- insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
- /* Initialize registers in order */
- i += __bpf_ld_imm64(&insn[i], R0, ~mem);
- i += __bpf_ld_imm64(&insn[i], rs, upd);
- insn[i++] = BPF_MOV64_REG(rd, R10);
- /* Perform atomic operation */
- insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
- if (op == BPF_CMPXCHG && width == BPF_W)
- insn[i++] = BPF_ZEXT_REG(R0);
- /* Check R0 register value */
- if (op == BPF_CMPXCHG)
- cmp = mem; /* Expect value from memory */
- else if (R0 == rd || R0 == rs)
- cmp = 0; /* Aliased, checked below */
- else
- cmp = ~mem; /* Expect value to be preserved */
- if (cmp) {
- insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
- (u32)cmp, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
- insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
- cmp >> 32, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- }
- /* Check source register value */
- if (rs == R0 && op == BPF_CMPXCHG)
- src = 0; /* Aliased with R0, checked above */
- else if (rs == rd && (op == BPF_CMPXCHG ||
- !(op & BPF_FETCH)))
- src = 0; /* Aliased with rd, checked below */
- else if (op == BPF_CMPXCHG)
- src = upd; /* Expect value to be preserved */
- else if (op & BPF_FETCH)
- src = mem; /* Expect fetched value from mem */
- else /* no fetch */
- src = upd; /* Expect value to be preserved */
- if (src) {
- insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
- (u32)src, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
- insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
- src >> 32, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- }
- /* Check destination register value */
- if (!(rd == R0 && op == BPF_CMPXCHG) &&
- !(rd == rs && (op & BPF_FETCH))) {
- insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- }
- /* Check value in memory */
- if (rs != rd) { /* No aliasing */
- i += __bpf_ld_imm64(&insn[i], R1, res);
- } else if (op == BPF_XCHG) { /* Aliased, XCHG */
- insn[i++] = BPF_MOV64_REG(R1, R10);
- } else if (op == BPF_CMPXCHG) { /* Aliased, CMPXCHG */
- i += __bpf_ld_imm64(&insn[i], R1, mem);
- } else { /* Aliased, ALU oper */
- i += __bpf_ld_imm64(&insn[i], R1, mem);
- insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
- }
- insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
- if (width == BPF_DW)
- insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
- else /* width == BPF_W */
- insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
- insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
- insn[i++] = BPF_EXIT_INSN();
- }
- }
- insn[i++] = BPF_MOV64_IMM(R0, 1);
- insn[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = i;
- BUG_ON(i > len);
- return 0;
- }
- /* 64-bit atomic register tests */
- static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
- }
- static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
- }
- static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
- }
- static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
- }
- static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
- }
- static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
- }
- static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
- }
- static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
- }
- static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
- }
- static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
- }
- /* 32-bit atomic register tests */
- static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
- }
- static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
- }
- static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
- }
- static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
- }
- static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
- }
- static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
- }
- static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
- }
- static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
- }
- static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
- }
- static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
- {
- return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
- }
- /*
- * Test the two-instruction 64-bit immediate load operation for all
- * power-of-two magnitudes of the immediate operand. For each MSB, a block
- * of immediate values centered around the power-of-two MSB is tested,
- * both for positive and negative values. The test is designed to verify
- * the operation for JITs that emit different code depending on the magnitude
- * of the immediate value. This is often the case if the native instruction
- * immediate field width is narrower than 32 bits.
- */
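- /*
-  * Illustrative sketch only, a generic model rather than any real JIT: on
-  * an ISA whose instructions carry, say, a 16-bit immediate field, a
-  * 64-bit constant is typically synthesized from several "load 16 bits
-  * and shift" steps, and a JIT will usually omit steps whose 16-bit chunk
-  * is zero. The number of native instructions then depends on the
-  * magnitude of the immediate, which is what the sweep over every MSB
-  * position below is meant to cover.
-  */
- static int __example_imm64_chunks(u64 imm)
- {
- 	int n = 0;
- 	int i;
- 	for (i = 0; i < 4; i++)
- 		if ((imm >> (16 * i)) & 0xffff)
- 			n++;
- 	return n ? n : 1;	/* at least one instruction, even for zero */
- }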
- static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
- {
- int block = 64; /* Increase for more tests per MSB position */
- int len = 3 + 8 * 63 * block * 2;
- struct bpf_insn *insn;
- int bit, adj, sign;
- int i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- for (bit = 0; bit <= 62; bit++) {
- for (adj = -block / 2; adj < block / 2; adj++) {
- for (sign = -1; sign <= 1; sign += 2) {
- s64 imm = sign * ((1LL << bit) + adj);
- /* Perform operation */
- i += __bpf_ld_imm64(&insn[i], R1, imm);
- /* Load reference */
- insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
- insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
- (u32)(imm >> 32));
- insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
- insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
- /* Check result */
- insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
- insn[i++] = BPF_EXIT_INSN();
- }
- }
- }
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insn[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
- /*
- * Test the two-instruction 64-bit immediate load operation for different
- * combinations of bytes. Each byte in the 64-bit word is constructed as
- * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
- * All 256 patterns of choosing either the (base1, mask1) or the
- * (base2, mask2) construction for each of the eight bytes are tested.
- */
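- /*
-  * Illustrative sketch only, not used by the tests: the low-byte effect of
-  * one (base, mask) pair. Bits set in the mask are taken from base, the
-  * remaining bits come from the LCG state. With the pairs used below this
-  * gives, for example, constant 0x00/0xff bytes for the "checker" test and
-  * bytes with the sign bit forced clear or set for the "pos_neg" test.
-  */
- static u8 __example_ld_imm64_byte(u8 base, u8 mask, u32 rand)
- {
- 	return (u8)((base & mask) | (rand & ~mask));
- }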
- static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
- u8 base1, u8 mask1,
- u8 base2, u8 mask2)
- {
- struct bpf_insn *insn;
- int len = 3 + 8 * BIT(8);
- int pattern, index;
- u32 rand = 1;
- int i = 0;
- insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
- if (!insn)
- return -ENOMEM;
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- for (pattern = 0; pattern < BIT(8); pattern++) {
- u64 imm = 0;
- for (index = 0; index < 8; index++) {
- int byte;
- if (pattern & BIT(index))
- byte = (base1 & mask1) | (rand & ~mask1);
- else
- byte = (base2 & mask2) | (rand & ~mask2);
- imm = (imm << 8) | byte;
- }
- /* Update our LCG */
- rand = rand * 1664525 + 1013904223;
- /* Perform operation */
- i += __bpf_ld_imm64(&insn[i], R1, imm);
- /* Load reference */
- insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
- insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
- insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
- insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
- /* Check result */
- insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
- insn[i++] = BPF_EXIT_INSN();
- }
- insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
- insn[i++] = BPF_EXIT_INSN();
- self->u.ptr.insns = insn;
- self->u.ptr.len = len;
- BUG_ON(i != len);
- return 0;
- }
- static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
- {
- return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
- }
- static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
- {
- return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
- }
- static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
- {
- return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
- }
- static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
- {
- return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
- }
- /*
- * Exhaustive tests of JMP operations for all combinations of power-of-two
- * magnitudes of the operands, both for positive and negative values. The
- * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
- * emit different code depending on the magnitude of the immediate value.
- */
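- /*
-  * Illustrative sketch only: the reference model below distinguishes the
-  * signed and unsigned conditions purely by the C types used in the
-  * comparison. For example, with v1 = -1 and v2 = 1, the signed test
-  * v1 > v2 is false, while the unsigned test (u64)v1 > (u64)v2 is true,
-  * since (u64)-1 is the largest 64-bit value.
-  */
- static bool __example_jsgt(s64 v1, s64 v2)
- {
- 	return v1 > v2;			/* __example_jsgt(-1, 1) == false */
- }
- static bool __example_jgt(s64 v1, s64 v2)
- {
- 	return (u64)v1 > (u64)v2;	/* __example_jgt(-1, 1) == true */
- }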
- static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
- {
- switch (op) {
- case BPF_JSET:
- return !!(v1 & v2);
- case BPF_JEQ:
- return v1 == v2;
- case BPF_JNE:
- return v1 != v2;
- case BPF_JGT:
- return (u64)v1 > (u64)v2;
- case BPF_JGE:
- return (u64)v1 >= (u64)v2;
- case BPF_JLT:
- return (u64)v1 < (u64)v2;
- case BPF_JLE:
- return (u64)v1 <= (u64)v2;
- case BPF_JSGT:
- return v1 > v2;
- case BPF_JSGE:
- return v1 >= v2;
- case BPF_JSLT:
- return v1 < v2;
- case BPF_JSLE:
- return v1 <= v2;
- }
- return false;
- }
- static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 imm)
- {
- int op = *(int *)arg;
- if (insns) {
- bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
- int i = 0;
- insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
- if (!match)
- insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- return 5 + 1;
- }
- static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 imm)
- {
- int op = *(int *)arg;
- if (insns) {
- bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
- int i = 0;
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
- if (!match)
- insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- return 5;
- }
- static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- if (insns) {
- bool match = __bpf_match_jmp_cond(dst, src, op);
- int i = 0;
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
- if (!match)
- insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- return 7;
- }
- static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
- struct bpf_insn *insns, s64 dst, s64 src)
- {
- int op = *(int *)arg;
- if (insns) {
- bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
- int i = 0;
- i += __bpf_ld_imm64(&insns[i], R1, dst);
- i += __bpf_ld_imm64(&insns[i], R2, src);
- insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
- if (!match)
- insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
- insns[i++] = BPF_EXIT_INSN();
- return i;
- }
- return 7;
- }
- static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 32,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_jmp_imm);
- }
- static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 32,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_jmp32_imm);
- }
- static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_jmp_reg);
- }
- static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
- {
- return __bpf_fill_pattern(self, &op, 64, 64,
- PATTERN_BLOCK1, PATTERN_BLOCK2,
- &__bpf_emit_jmp32_reg);
- }
- /* JMP immediate tests */
- static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JSET);
- }
- static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JEQ);
- }
- static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JNE);
- }
- static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JGT);
- }
- static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JGE);
- }
- static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JLT);
- }
- static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JLE);
- }
- static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JSGT);
- }
- static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JSGE);
- }
- static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JSLT);
- }
- static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp_imm(self, BPF_JSLE);
- }
- /* JMP32 immediate tests */
- static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JSET);
- }
- static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JEQ);
- }
- static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JNE);
- }
- static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JGT);
- }
- static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JGE);
- }
- static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JLT);
- }
- static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JLE);
- }
- static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JSGT);
- }
- static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JSGE);
- }
- static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JSLT);
- }
- static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_imm(self, BPF_JSLE);
- }
- /* JMP register tests */
- static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JSET);
- }
- static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JEQ);
- }
- static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JNE);
- }
- static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JGT);
- }
- static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JGE);
- }
- static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JLT);
- }
- static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JLE);
- }
- static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JSGT);
- }
- static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JSGE);
- }
- static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JSLT);
- }
- static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp_reg(self, BPF_JSLE);
- }
- /* JMP32 register tests */
- static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JSET);
- }
- static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JEQ);
- }
- static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JNE);
- }
- static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JGT);
- }
- static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JGE);
- }
- static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JLT);
- }
- static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JLE);
- }
- static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JSGT);
- }
- static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JSGE);
- }
- static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JSLT);
- }
- static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
- {
- return __bpf_fill_jmp32_reg(self, BPF_JSLE);
- }
- /*
- * Set up a sequence of staggered jumps, forwards and backwards with
- * increasing offset. This tests the conversion of relative jumps to
- * JITed native jumps. On some architectures, for example MIPS, a large
- * PC-relative jump offset may overflow the immediate field of the native
- * conditional branch instruction, triggering a conversion to use an
- * absolute jump instead. Since this changes the jump offsets, another
- * offset computation pass is necessary, and that may in turn trigger
- * another branch conversion. This jump sequence is particularly nasty
- * in that regard.
- *
- * The sequence generation is parameterized by size and jump type.
- * The size must be even, and the expected result is always size + 1.
- * Below is an example with size=8 and result=9.
- *
- * ________________________Start
- * R0 = 0
- * R1 = r1
- * R2 = r2
- * ,------- JMP +4 * 3______________Preamble: 4 insns
- * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
- * | | R0 = 8 |
- * | | JMP +7 * 3 ------------------------.
- * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
- * | | | R0 = 6 | | |
- * | | | JMP +5 * 3 ------------------. | |
- * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
- * | | | | R0 = 4 | | | | |
- * | | | | JMP +3 * 3 ------------. | | | |
- * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
- * | | | | | R0 = 2 | | | | | | |
- * | | | | | JMP +1 * 3 ------. | | | | | |
- * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
- * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
- * | | | | | JMP -2 * 3 ---' | | | | | | |
- * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
- * | | | | | | R0 = 3 | | | | | |
- * | | | | | | JMP -4 * 3 ---------' | | | | |
- * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
- * | | | | | | | R0 = 5 | | | |
- * | | | | | | | JMP -6 * 3 ---------------' | | |
- * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
- * | | | | | | | | R0 = 7 | |
- * | | Error | | | JMP -8 * 3 ---------------------' |
- * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
- * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
- * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
- *
- */
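- /*
-  * Illustrative sketch only, not used by the tests: a walk over the jump
-  * sequence above at the level of sequence entries, mirroring how the
-  * generator below computes each entry's offset. Starting from the middle
-  * entry that the preamble jumps to, every entry is visited exactly once
-  * and R0 ends up as size + 1, e.g. __example_staggered_walk(8) == 9.
-  */
- static int __example_staggered_walk(int size)	/* size must be even */
- {
- 	int e = size / 2;	/* the preamble jumps to the middle entry */
- 	int r0 = 0;
- 	for (;;) {
- 		int off = size - 2 * e;
- 		int loc;
- 		if (off <= 0)
- 			off--;	/* the generator never leaves off == 0 */
- 		loc = off < 0 ? -off : off;
- 		if (r0 != loc - 1)
- 			return -1;	/* the real program jumps to EXIT here */
- 		r0 = loc;
- 		if (e == size)	/* the last entry falls through to EXIT */
- 			return r0;
- 		e += off;	/* the staggered forward/backward jump */
- 	}
- }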
- /* The maximum size parameter */
- #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
- /* We use a reduced number of iterations to get a reasonable execution time */
- #define NR_STAGGERED_JMP_RUNS 10
- static int __bpf_fill_staggered_jumps(struct bpf_test *self,
- const struct bpf_insn *jmp,
- u64 r1, u64 r2)
- {
- int size = self->test[0].result - 1;
- int len = 4 + 3 * (size + 1);
- struct bpf_insn *insns;
- int off, ind;
- insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
- if (!insns)
- return -ENOMEM;
- /* Preamble */
- insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
- insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
- insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
- insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
- /* Sequence */
- for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
- struct bpf_insn *ins = &insns[4 + 3 * ind];
- int loc;
- if (off == 0)
- off--;
- loc = abs(off);
- ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
- 3 * (size - ind) + 1);
- ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
- ins[2] = *jmp;
- ins[2].off = 3 * (off - 1);
- }
- /* Return */
- insns[len - 1] = BPF_EXIT_INSN();
- self->u.ptr.insns = insns;
- self->u.ptr.len = len;
- return 0;
- }
- /* 64-bit unconditional jump */
- static int bpf_fill_staggered_ja(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
- }
- /* 64-bit immediate jumps */
- static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
- }
- static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
- }
- static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
- }
- static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
- }
- static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
- }
- static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
- }
- static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
- }
- /* 64-bit register jumps */
- static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
- }
- static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
- }
- static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
- }
- static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
- }
- static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
- }
- static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
- }
- static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
- }
- static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
- }
- /* 32-bit immediate jumps */
- static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
- }
- static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
- }
- static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
- }
- static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
- }
- static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
- }
- static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
- }
- static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
- }
- static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
- }
- /* 32-bit register jumps */
- static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
- }
- static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
- }
- static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
- }
- static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
- }
- static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
- }
- static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
- }
- static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
- }
- static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
- }
- static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
- {
- struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
- return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
- }
- static struct bpf_test tests[] = {
- {
- "TAX",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_LEN, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
- BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 10, 20, 30, 40, 50 },
- { { 2, 10 }, { 3, 20 }, { 4, 30 } },
- },
- {
- "TXA",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
- },
- CLASSIC,
- { 10, 20, 30, 40, 50 },
- { { 1, 2 }, { 3, 6 }, { 4, 8 } },
- },
- {
- "ADD_SUB_MUL_K",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 1),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
- BPF_STMT(BPF_LDX | BPF_IMM, 3),
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
- BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xfffffffd } }
- },
- {
- "DIV_MOD_KX",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 8),
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
- BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
- BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0x20000000 } }
- },
- {
- "AND_OR_LSH_K",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xff),
- BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
- BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xf),
- BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0x800000ff }, { 1, 0x800000ff } },
- },
- {
- "LD_IMM_0",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 0),
- BPF_STMT(BPF_RET | BPF_K, 1),
- },
- CLASSIC,
- { },
- { { 1, 1 } },
- },
- {
- "LD_IND",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
- BPF_STMT(BPF_RET | BPF_K, 1)
- },
- CLASSIC,
- { },
- { { 1, 0 }, { 10, 0 }, { 60, 0 } },
- },
- {
- "LD_ABS",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
- BPF_STMT(BPF_RET | BPF_K, 1)
- },
- CLASSIC,
- { },
- { { 1, 0 }, { 10, 0 }, { 60, 0 } },
- },
- {
- "LD_ABS_LL",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 1, 2, 3 },
- { { 1, 0 }, { 2, 3 } },
- },
- {
- "LD_IND_LL",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 1, 2, 3, 0xff },
- { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
- },
- {
- "LD_ABS_NET",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
- { { 15, 0 }, { 16, 3 } },
- },
- {
- "LD_IND_NET",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
- { { 14, 0 }, { 15, 1 }, { 17, 3 } },
- },
- {
- "LD_PKTTYPE",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PKTTYPE),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PKTTYPE),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PKTTYPE),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, 3 }, { 10, 3 } },
- },
- {
- "LD_MARK",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_MARK),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, SKB_MARK}, { 10, SKB_MARK} },
- },
- {
- "LD_RXHASH",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_RXHASH),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, SKB_HASH}, { 10, SKB_HASH} },
- },
- {
- "LD_QUEUE",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_QUEUE),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
- },
- {
- "LD_PROTOCOL",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 0),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PROTOCOL),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 0),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { 10, 20, 30 },
- { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
- },
- {
- "LD_VLAN_TAG",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_VLAN_TAG),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- {
- { 1, SKB_VLAN_TCI },
- { 10, SKB_VLAN_TCI }
- },
- },
- {
- "LD_VLAN_TAG_PRESENT",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- {
- { 1, SKB_VLAN_PRESENT },
- { 10, SKB_VLAN_PRESENT }
- },
- },
- {
- "LD_IFINDEX",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_IFINDEX),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
- },
- {
- "LD_HATYPE",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_HATYPE),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
- },
- {
- "LD_CPU",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_CPU),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_CPU),
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, 0 }, { 10, 0 } },
- },
- {
- "LD_NLATTR",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 2),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_LDX | BPF_IMM, 3),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- #ifdef __BIG_ENDIAN
- { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
- #else
- { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
- #endif
- { { 4, 0 }, { 20, 6 } },
- },
- {
- "LD_NLATTR_NEST",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LDX | BPF_IMM, 3),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_NLATTR_NEST),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- #ifdef __BIG_ENDIAN
- { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
- #else
- { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
- #endif
- { { 4, 0 }, { 20, 10 } },
- },
- {
- "LD_PAYLOAD_OFF",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PAY_OFFSET),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PAY_OFFSET),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PAY_OFFSET),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PAY_OFFSET),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_PAY_OFFSET),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
- * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
- * id 9737, seq 1, length 64
- */
- { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x08, 0x00,
- 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
- 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
- { { 30, 0 }, { 100, 42 } },
- },
- {
- "LD_ANC_XOR",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 10),
- BPF_STMT(BPF_LDX | BPF_IMM, 300),
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_ALU_XOR_X),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
- },
- {
- "SPILL_FILL",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 2),
- BPF_STMT(BPF_ALU | BPF_RSH, 1),
- BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
- BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
- BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
- BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
- BPF_STMT(BPF_STX, 15), /* M3 = len */
- BPF_STMT(BPF_LDX | BPF_MEM, 1),
- BPF_STMT(BPF_LD | BPF_MEM, 2),
- BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 15),
- BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
- },
- {
- "JEQ",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 3, 3, 3, 3, 3 },
- { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
- },
- {
- "JGT",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
- BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 4, 4, 4, 3, 3 },
- { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
- },
- {
- "JGE (jt 0), test 1",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 4, 4, 4, 3, 3 },
- { { 2, 0 }, { 3, 1 }, { 4, 1 } },
- },
- {
- "JGE (jt 0), test 2",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 4, 4, 5, 3, 3 },
- { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
- },
- {
- "JGE",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 10),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 20),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 40),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 1, 2, 3, 4, 5 },
- { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
- },
- {
- "JSET",
- .u.insns = {
- BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
- BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
- BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
- BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
- BPF_STMT(BPF_LDX | BPF_LEN, 0),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 10),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 20),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 30),
- BPF_STMT(BPF_RET | BPF_K, MAX_K)
- },
- CLASSIC,
- { 0, 0xAA, 0x55, 1 },
- { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
- },
- {
- "tcpdump port 22",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
- BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 0xffff),
- BPF_STMT(BPF_RET | BPF_K, 0),
- },
- CLASSIC,
- /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4 (0x0800)
- * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
- * seq 1305692979:1305693027, ack 3650467037, win 65535,
- * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
- */
- { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
- 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
- 0x08, 0x00,
- 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
- 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
- 0x0a, 0x01, 0x01, 0x95, /* ip src */
- 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
- 0xc2, 0x24,
- 0x00, 0x16 /* dst port */ },
- { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
- },
- {
- "tcpdump complex",
- .u.insns = {
- /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
- * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
- * (len > 115 or len < 30000000000)' -d
- */
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
- BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
- BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
- BPF_STMT(BPF_ST, 1),
- BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
- BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
- BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
- BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
- BPF_STMT(BPF_LD | BPF_MEM, 1),
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
- BPF_STMT(BPF_ST, 5),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
- BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
- BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
- BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
- BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
- BPF_STMT(BPF_LD | BPF_MEM, 5),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
- BPF_STMT(BPF_LD | BPF_LEN, 0),
- BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
- BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 0xffff),
- BPF_STMT(BPF_RET | BPF_K, 0),
- },
- CLASSIC,
- { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
- 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
- 0x08, 0x00,
- 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
- 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
- 0x0a, 0x01, 0x01, 0x95, /* ip src */
- 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
- 0xc2, 0x24,
- 0x00, 0x16 /* dst port */ },
- { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
- },
- {
- "RET_A",
- .u.insns = {
- /* check that uninitialized X and A contain zeros */
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0)
- },
- CLASSIC,
- { },
- { {1, 0}, {2, 0} },
- },
- {
- "INT: ADD trivial",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_ADD, R1, 2),
- BPF_ALU64_IMM(BPF_MOV, R2, 3),
- BPF_ALU64_REG(BPF_SUB, R1, R2),
- BPF_ALU64_IMM(BPF_ADD, R1, -1),
- BPF_ALU64_IMM(BPF_MUL, R1, 3),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffd } }
- },
- {
- "INT: MUL_X",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_ALU64_IMM(BPF_MOV, R1, -1),
- BPF_ALU64_IMM(BPF_MOV, R2, 3),
- BPF_ALU64_REG(BPF_MUL, R1, R2),
- BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "INT: MUL_X2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- BPF_ALU32_IMM(BPF_MOV, R1, -1),
- BPF_ALU32_IMM(BPF_MOV, R2, 3),
- BPF_ALU64_REG(BPF_MUL, R1, R2),
- BPF_ALU64_IMM(BPF_RSH, R1, 8),
- BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "INT: MUL32_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- BPF_ALU64_IMM(BPF_MOV, R1, -1),
- BPF_ALU32_IMM(BPF_MOV, R2, 3),
- BPF_ALU32_REG(BPF_MUL, R1, R2),
- BPF_ALU64_IMM(BPF_RSH, R1, 8),
- BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- /* Have to test all register combinations, since
- * JITing of different registers will produce
- * different asm code.
- */
- "INT: ADD 64-bit",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R2, 2),
- BPF_ALU64_IMM(BPF_MOV, R3, 3),
- BPF_ALU64_IMM(BPF_MOV, R4, 4),
- BPF_ALU64_IMM(BPF_MOV, R5, 5),
- BPF_ALU64_IMM(BPF_MOV, R6, 6),
- BPF_ALU64_IMM(BPF_MOV, R7, 7),
- BPF_ALU64_IMM(BPF_MOV, R8, 8),
- BPF_ALU64_IMM(BPF_MOV, R9, 9),
- BPF_ALU64_IMM(BPF_ADD, R0, 20),
- BPF_ALU64_IMM(BPF_ADD, R1, 20),
- BPF_ALU64_IMM(BPF_ADD, R2, 20),
- BPF_ALU64_IMM(BPF_ADD, R3, 20),
- BPF_ALU64_IMM(BPF_ADD, R4, 20),
- BPF_ALU64_IMM(BPF_ADD, R5, 20),
- BPF_ALU64_IMM(BPF_ADD, R6, 20),
- BPF_ALU64_IMM(BPF_ADD, R7, 20),
- BPF_ALU64_IMM(BPF_ADD, R8, 20),
- BPF_ALU64_IMM(BPF_ADD, R9, 20),
- BPF_ALU64_IMM(BPF_SUB, R0, 10),
- BPF_ALU64_IMM(BPF_SUB, R1, 10),
- BPF_ALU64_IMM(BPF_SUB, R2, 10),
- BPF_ALU64_IMM(BPF_SUB, R3, 10),
- BPF_ALU64_IMM(BPF_SUB, R4, 10),
- BPF_ALU64_IMM(BPF_SUB, R5, 10),
- BPF_ALU64_IMM(BPF_SUB, R6, 10),
- BPF_ALU64_IMM(BPF_SUB, R7, 10),
- BPF_ALU64_IMM(BPF_SUB, R8, 10),
- BPF_ALU64_IMM(BPF_SUB, R9, 10),
- BPF_ALU64_REG(BPF_ADD, R0, R0),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_ALU64_REG(BPF_ADD, R0, R2),
- BPF_ALU64_REG(BPF_ADD, R0, R3),
- BPF_ALU64_REG(BPF_ADD, R0, R4),
- BPF_ALU64_REG(BPF_ADD, R0, R5),
- BPF_ALU64_REG(BPF_ADD, R0, R6),
- BPF_ALU64_REG(BPF_ADD, R0, R7),
- BPF_ALU64_REG(BPF_ADD, R0, R8),
- BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
- BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R1, R0),
- BPF_ALU64_REG(BPF_ADD, R1, R1),
- BPF_ALU64_REG(BPF_ADD, R1, R2),
- BPF_ALU64_REG(BPF_ADD, R1, R3),
- BPF_ALU64_REG(BPF_ADD, R1, R4),
- BPF_ALU64_REG(BPF_ADD, R1, R5),
- BPF_ALU64_REG(BPF_ADD, R1, R6),
- BPF_ALU64_REG(BPF_ADD, R1, R7),
- BPF_ALU64_REG(BPF_ADD, R1, R8),
- BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
- BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R2, R0),
- BPF_ALU64_REG(BPF_ADD, R2, R1),
- BPF_ALU64_REG(BPF_ADD, R2, R2),
- BPF_ALU64_REG(BPF_ADD, R2, R3),
- BPF_ALU64_REG(BPF_ADD, R2, R4),
- BPF_ALU64_REG(BPF_ADD, R2, R5),
- BPF_ALU64_REG(BPF_ADD, R2, R6),
- BPF_ALU64_REG(BPF_ADD, R2, R7),
- BPF_ALU64_REG(BPF_ADD, R2, R8),
- BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
- BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R3, R0),
- BPF_ALU64_REG(BPF_ADD, R3, R1),
- BPF_ALU64_REG(BPF_ADD, R3, R2),
- BPF_ALU64_REG(BPF_ADD, R3, R3),
- BPF_ALU64_REG(BPF_ADD, R3, R4),
- BPF_ALU64_REG(BPF_ADD, R3, R5),
- BPF_ALU64_REG(BPF_ADD, R3, R6),
- BPF_ALU64_REG(BPF_ADD, R3, R7),
- BPF_ALU64_REG(BPF_ADD, R3, R8),
- BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
- BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R4, R0),
- BPF_ALU64_REG(BPF_ADD, R4, R1),
- BPF_ALU64_REG(BPF_ADD, R4, R2),
- BPF_ALU64_REG(BPF_ADD, R4, R3),
- BPF_ALU64_REG(BPF_ADD, R4, R4),
- BPF_ALU64_REG(BPF_ADD, R4, R5),
- BPF_ALU64_REG(BPF_ADD, R4, R6),
- BPF_ALU64_REG(BPF_ADD, R4, R7),
- BPF_ALU64_REG(BPF_ADD, R4, R8),
- BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
- BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R5, R0),
- BPF_ALU64_REG(BPF_ADD, R5, R1),
- BPF_ALU64_REG(BPF_ADD, R5, R2),
- BPF_ALU64_REG(BPF_ADD, R5, R3),
- BPF_ALU64_REG(BPF_ADD, R5, R4),
- BPF_ALU64_REG(BPF_ADD, R5, R5),
- BPF_ALU64_REG(BPF_ADD, R5, R6),
- BPF_ALU64_REG(BPF_ADD, R5, R7),
- BPF_ALU64_REG(BPF_ADD, R5, R8),
- BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
- BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R6, R0),
- BPF_ALU64_REG(BPF_ADD, R6, R1),
- BPF_ALU64_REG(BPF_ADD, R6, R2),
- BPF_ALU64_REG(BPF_ADD, R6, R3),
- BPF_ALU64_REG(BPF_ADD, R6, R4),
- BPF_ALU64_REG(BPF_ADD, R6, R5),
- BPF_ALU64_REG(BPF_ADD, R6, R6),
- BPF_ALU64_REG(BPF_ADD, R6, R7),
- BPF_ALU64_REG(BPF_ADD, R6, R8),
- BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
- BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R7, R0),
- BPF_ALU64_REG(BPF_ADD, R7, R1),
- BPF_ALU64_REG(BPF_ADD, R7, R2),
- BPF_ALU64_REG(BPF_ADD, R7, R3),
- BPF_ALU64_REG(BPF_ADD, R7, R4),
- BPF_ALU64_REG(BPF_ADD, R7, R5),
- BPF_ALU64_REG(BPF_ADD, R7, R6),
- BPF_ALU64_REG(BPF_ADD, R7, R7),
- BPF_ALU64_REG(BPF_ADD, R7, R8),
- BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
- BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R8, R0),
- BPF_ALU64_REG(BPF_ADD, R8, R1),
- BPF_ALU64_REG(BPF_ADD, R8, R2),
- BPF_ALU64_REG(BPF_ADD, R8, R3),
- BPF_ALU64_REG(BPF_ADD, R8, R4),
- BPF_ALU64_REG(BPF_ADD, R8, R5),
- BPF_ALU64_REG(BPF_ADD, R8, R6),
- BPF_ALU64_REG(BPF_ADD, R8, R7),
- BPF_ALU64_REG(BPF_ADD, R8, R8),
- BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
- BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_ADD, R9, R0),
- BPF_ALU64_REG(BPF_ADD, R9, R1),
- BPF_ALU64_REG(BPF_ADD, R9, R2),
- BPF_ALU64_REG(BPF_ADD, R9, R3),
- BPF_ALU64_REG(BPF_ADD, R9, R4),
- BPF_ALU64_REG(BPF_ADD, R9, R5),
- BPF_ALU64_REG(BPF_ADD, R9, R6),
- BPF_ALU64_REG(BPF_ADD, R9, R7),
- BPF_ALU64_REG(BPF_ADD, R9, R8),
- BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
- BPF_ALU64_REG(BPF_MOV, R0, R9),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2957380 } }
- },
- {
- "INT: ADD 32-bit",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 20),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R2, 2),
- BPF_ALU32_IMM(BPF_MOV, R3, 3),
- BPF_ALU32_IMM(BPF_MOV, R4, 4),
- BPF_ALU32_IMM(BPF_MOV, R5, 5),
- BPF_ALU32_IMM(BPF_MOV, R6, 6),
- BPF_ALU32_IMM(BPF_MOV, R7, 7),
- BPF_ALU32_IMM(BPF_MOV, R8, 8),
- BPF_ALU32_IMM(BPF_MOV, R9, 9),
- BPF_ALU64_IMM(BPF_ADD, R1, 10),
- BPF_ALU64_IMM(BPF_ADD, R2, 10),
- BPF_ALU64_IMM(BPF_ADD, R3, 10),
- BPF_ALU64_IMM(BPF_ADD, R4, 10),
- BPF_ALU64_IMM(BPF_ADD, R5, 10),
- BPF_ALU64_IMM(BPF_ADD, R6, 10),
- BPF_ALU64_IMM(BPF_ADD, R7, 10),
- BPF_ALU64_IMM(BPF_ADD, R8, 10),
- BPF_ALU64_IMM(BPF_ADD, R9, 10),
- BPF_ALU32_REG(BPF_ADD, R0, R1),
- BPF_ALU32_REG(BPF_ADD, R0, R2),
- BPF_ALU32_REG(BPF_ADD, R0, R3),
- BPF_ALU32_REG(BPF_ADD, R0, R4),
- BPF_ALU32_REG(BPF_ADD, R0, R5),
- BPF_ALU32_REG(BPF_ADD, R0, R6),
- BPF_ALU32_REG(BPF_ADD, R0, R7),
- BPF_ALU32_REG(BPF_ADD, R0, R8),
- BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
- BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R1, R0),
- BPF_ALU32_REG(BPF_ADD, R1, R1),
- BPF_ALU32_REG(BPF_ADD, R1, R2),
- BPF_ALU32_REG(BPF_ADD, R1, R3),
- BPF_ALU32_REG(BPF_ADD, R1, R4),
- BPF_ALU32_REG(BPF_ADD, R1, R5),
- BPF_ALU32_REG(BPF_ADD, R1, R6),
- BPF_ALU32_REG(BPF_ADD, R1, R7),
- BPF_ALU32_REG(BPF_ADD, R1, R8),
- BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
- BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R2, R0),
- BPF_ALU32_REG(BPF_ADD, R2, R1),
- BPF_ALU32_REG(BPF_ADD, R2, R2),
- BPF_ALU32_REG(BPF_ADD, R2, R3),
- BPF_ALU32_REG(BPF_ADD, R2, R4),
- BPF_ALU32_REG(BPF_ADD, R2, R5),
- BPF_ALU32_REG(BPF_ADD, R2, R6),
- BPF_ALU32_REG(BPF_ADD, R2, R7),
- BPF_ALU32_REG(BPF_ADD, R2, R8),
- BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
- BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R3, R0),
- BPF_ALU32_REG(BPF_ADD, R3, R1),
- BPF_ALU32_REG(BPF_ADD, R3, R2),
- BPF_ALU32_REG(BPF_ADD, R3, R3),
- BPF_ALU32_REG(BPF_ADD, R3, R4),
- BPF_ALU32_REG(BPF_ADD, R3, R5),
- BPF_ALU32_REG(BPF_ADD, R3, R6),
- BPF_ALU32_REG(BPF_ADD, R3, R7),
- BPF_ALU32_REG(BPF_ADD, R3, R8),
- BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
- BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R4, R0),
- BPF_ALU32_REG(BPF_ADD, R4, R1),
- BPF_ALU32_REG(BPF_ADD, R4, R2),
- BPF_ALU32_REG(BPF_ADD, R4, R3),
- BPF_ALU32_REG(BPF_ADD, R4, R4),
- BPF_ALU32_REG(BPF_ADD, R4, R5),
- BPF_ALU32_REG(BPF_ADD, R4, R6),
- BPF_ALU32_REG(BPF_ADD, R4, R7),
- BPF_ALU32_REG(BPF_ADD, R4, R8),
- BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
- BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R5, R0),
- BPF_ALU32_REG(BPF_ADD, R5, R1),
- BPF_ALU32_REG(BPF_ADD, R5, R2),
- BPF_ALU32_REG(BPF_ADD, R5, R3),
- BPF_ALU32_REG(BPF_ADD, R5, R4),
- BPF_ALU32_REG(BPF_ADD, R5, R5),
- BPF_ALU32_REG(BPF_ADD, R5, R6),
- BPF_ALU32_REG(BPF_ADD, R5, R7),
- BPF_ALU32_REG(BPF_ADD, R5, R8),
- BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
- BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R6, R0),
- BPF_ALU32_REG(BPF_ADD, R6, R1),
- BPF_ALU32_REG(BPF_ADD, R6, R2),
- BPF_ALU32_REG(BPF_ADD, R6, R3),
- BPF_ALU32_REG(BPF_ADD, R6, R4),
- BPF_ALU32_REG(BPF_ADD, R6, R5),
- BPF_ALU32_REG(BPF_ADD, R6, R6),
- BPF_ALU32_REG(BPF_ADD, R6, R7),
- BPF_ALU32_REG(BPF_ADD, R6, R8),
- BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
- BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R7, R0),
- BPF_ALU32_REG(BPF_ADD, R7, R1),
- BPF_ALU32_REG(BPF_ADD, R7, R2),
- BPF_ALU32_REG(BPF_ADD, R7, R3),
- BPF_ALU32_REG(BPF_ADD, R7, R4),
- BPF_ALU32_REG(BPF_ADD, R7, R5),
- BPF_ALU32_REG(BPF_ADD, R7, R6),
- BPF_ALU32_REG(BPF_ADD, R7, R7),
- BPF_ALU32_REG(BPF_ADD, R7, R8),
- BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
- BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R8, R0),
- BPF_ALU32_REG(BPF_ADD, R8, R1),
- BPF_ALU32_REG(BPF_ADD, R8, R2),
- BPF_ALU32_REG(BPF_ADD, R8, R3),
- BPF_ALU32_REG(BPF_ADD, R8, R4),
- BPF_ALU32_REG(BPF_ADD, R8, R5),
- BPF_ALU32_REG(BPF_ADD, R8, R6),
- BPF_ALU32_REG(BPF_ADD, R8, R7),
- BPF_ALU32_REG(BPF_ADD, R8, R8),
- BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
- BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_REG(BPF_ADD, R9, R0),
- BPF_ALU32_REG(BPF_ADD, R9, R1),
- BPF_ALU32_REG(BPF_ADD, R9, R2),
- BPF_ALU32_REG(BPF_ADD, R9, R3),
- BPF_ALU32_REG(BPF_ADD, R9, R4),
- BPF_ALU32_REG(BPF_ADD, R9, R5),
- BPF_ALU32_REG(BPF_ADD, R9, R6),
- BPF_ALU32_REG(BPF_ADD, R9, R7),
- BPF_ALU32_REG(BPF_ADD, R9, R8),
- BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
- BPF_ALU32_REG(BPF_MOV, R0, R9),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2957380 } }
- },
- { /* Mainly checking JIT here. */
- "INT: SUB",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R2, 2),
- BPF_ALU64_IMM(BPF_MOV, R3, 3),
- BPF_ALU64_IMM(BPF_MOV, R4, 4),
- BPF_ALU64_IMM(BPF_MOV, R5, 5),
- BPF_ALU64_IMM(BPF_MOV, R6, 6),
- BPF_ALU64_IMM(BPF_MOV, R7, 7),
- BPF_ALU64_IMM(BPF_MOV, R8, 8),
- BPF_ALU64_IMM(BPF_MOV, R9, 9),
- BPF_ALU64_REG(BPF_SUB, R0, R0),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_ALU64_REG(BPF_SUB, R0, R2),
- BPF_ALU64_REG(BPF_SUB, R0, R3),
- BPF_ALU64_REG(BPF_SUB, R0, R4),
- BPF_ALU64_REG(BPF_SUB, R0, R5),
- BPF_ALU64_REG(BPF_SUB, R0, R6),
- BPF_ALU64_REG(BPF_SUB, R0, R7),
- BPF_ALU64_REG(BPF_SUB, R0, R8),
- BPF_ALU64_REG(BPF_SUB, R0, R9),
- BPF_ALU64_IMM(BPF_SUB, R0, 10),
- BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R1, R0),
- BPF_ALU64_REG(BPF_SUB, R1, R2),
- BPF_ALU64_REG(BPF_SUB, R1, R3),
- BPF_ALU64_REG(BPF_SUB, R1, R4),
- BPF_ALU64_REG(BPF_SUB, R1, R5),
- BPF_ALU64_REG(BPF_SUB, R1, R6),
- BPF_ALU64_REG(BPF_SUB, R1, R7),
- BPF_ALU64_REG(BPF_SUB, R1, R8),
- BPF_ALU64_REG(BPF_SUB, R1, R9),
- BPF_ALU64_IMM(BPF_SUB, R1, 10),
- BPF_ALU64_REG(BPF_SUB, R2, R0),
- BPF_ALU64_REG(BPF_SUB, R2, R1),
- BPF_ALU64_REG(BPF_SUB, R2, R3),
- BPF_ALU64_REG(BPF_SUB, R2, R4),
- BPF_ALU64_REG(BPF_SUB, R2, R5),
- BPF_ALU64_REG(BPF_SUB, R2, R6),
- BPF_ALU64_REG(BPF_SUB, R2, R7),
- BPF_ALU64_REG(BPF_SUB, R2, R8),
- BPF_ALU64_REG(BPF_SUB, R2, R9),
- BPF_ALU64_IMM(BPF_SUB, R2, 10),
- BPF_ALU64_REG(BPF_SUB, R3, R0),
- BPF_ALU64_REG(BPF_SUB, R3, R1),
- BPF_ALU64_REG(BPF_SUB, R3, R2),
- BPF_ALU64_REG(BPF_SUB, R3, R4),
- BPF_ALU64_REG(BPF_SUB, R3, R5),
- BPF_ALU64_REG(BPF_SUB, R3, R6),
- BPF_ALU64_REG(BPF_SUB, R3, R7),
- BPF_ALU64_REG(BPF_SUB, R3, R8),
- BPF_ALU64_REG(BPF_SUB, R3, R9),
- BPF_ALU64_IMM(BPF_SUB, R3, 10),
- BPF_ALU64_REG(BPF_SUB, R4, R0),
- BPF_ALU64_REG(BPF_SUB, R4, R1),
- BPF_ALU64_REG(BPF_SUB, R4, R2),
- BPF_ALU64_REG(BPF_SUB, R4, R3),
- BPF_ALU64_REG(BPF_SUB, R4, R5),
- BPF_ALU64_REG(BPF_SUB, R4, R6),
- BPF_ALU64_REG(BPF_SUB, R4, R7),
- BPF_ALU64_REG(BPF_SUB, R4, R8),
- BPF_ALU64_REG(BPF_SUB, R4, R9),
- BPF_ALU64_IMM(BPF_SUB, R4, 10),
- BPF_ALU64_REG(BPF_SUB, R5, R0),
- BPF_ALU64_REG(BPF_SUB, R5, R1),
- BPF_ALU64_REG(BPF_SUB, R5, R2),
- BPF_ALU64_REG(BPF_SUB, R5, R3),
- BPF_ALU64_REG(BPF_SUB, R5, R4),
- BPF_ALU64_REG(BPF_SUB, R5, R6),
- BPF_ALU64_REG(BPF_SUB, R5, R7),
- BPF_ALU64_REG(BPF_SUB, R5, R8),
- BPF_ALU64_REG(BPF_SUB, R5, R9),
- BPF_ALU64_IMM(BPF_SUB, R5, 10),
- BPF_ALU64_REG(BPF_SUB, R6, R0),
- BPF_ALU64_REG(BPF_SUB, R6, R1),
- BPF_ALU64_REG(BPF_SUB, R6, R2),
- BPF_ALU64_REG(BPF_SUB, R6, R3),
- BPF_ALU64_REG(BPF_SUB, R6, R4),
- BPF_ALU64_REG(BPF_SUB, R6, R5),
- BPF_ALU64_REG(BPF_SUB, R6, R7),
- BPF_ALU64_REG(BPF_SUB, R6, R8),
- BPF_ALU64_REG(BPF_SUB, R6, R9),
- BPF_ALU64_IMM(BPF_SUB, R6, 10),
- BPF_ALU64_REG(BPF_SUB, R7, R0),
- BPF_ALU64_REG(BPF_SUB, R7, R1),
- BPF_ALU64_REG(BPF_SUB, R7, R2),
- BPF_ALU64_REG(BPF_SUB, R7, R3),
- BPF_ALU64_REG(BPF_SUB, R7, R4),
- BPF_ALU64_REG(BPF_SUB, R7, R5),
- BPF_ALU64_REG(BPF_SUB, R7, R6),
- BPF_ALU64_REG(BPF_SUB, R7, R8),
- BPF_ALU64_REG(BPF_SUB, R7, R9),
- BPF_ALU64_IMM(BPF_SUB, R7, 10),
- BPF_ALU64_REG(BPF_SUB, R8, R0),
- BPF_ALU64_REG(BPF_SUB, R8, R1),
- BPF_ALU64_REG(BPF_SUB, R8, R2),
- BPF_ALU64_REG(BPF_SUB, R8, R3),
- BPF_ALU64_REG(BPF_SUB, R8, R4),
- BPF_ALU64_REG(BPF_SUB, R8, R5),
- BPF_ALU64_REG(BPF_SUB, R8, R6),
- BPF_ALU64_REG(BPF_SUB, R8, R7),
- BPF_ALU64_REG(BPF_SUB, R8, R9),
- BPF_ALU64_IMM(BPF_SUB, R8, 10),
- BPF_ALU64_REG(BPF_SUB, R9, R0),
- BPF_ALU64_REG(BPF_SUB, R9, R1),
- BPF_ALU64_REG(BPF_SUB, R9, R2),
- BPF_ALU64_REG(BPF_SUB, R9, R3),
- BPF_ALU64_REG(BPF_SUB, R9, R4),
- BPF_ALU64_REG(BPF_SUB, R9, R5),
- BPF_ALU64_REG(BPF_SUB, R9, R6),
- BPF_ALU64_REG(BPF_SUB, R9, R7),
- BPF_ALU64_REG(BPF_SUB, R9, R8),
- BPF_ALU64_IMM(BPF_SUB, R9, 10),
- BPF_ALU64_IMM(BPF_SUB, R0, 10),
- BPF_ALU64_IMM(BPF_NEG, R0, 0),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_ALU64_REG(BPF_SUB, R0, R2),
- BPF_ALU64_REG(BPF_SUB, R0, R3),
- BPF_ALU64_REG(BPF_SUB, R0, R4),
- BPF_ALU64_REG(BPF_SUB, R0, R5),
- BPF_ALU64_REG(BPF_SUB, R0, R6),
- BPF_ALU64_REG(BPF_SUB, R0, R7),
- BPF_ALU64_REG(BPF_SUB, R0, R8),
- BPF_ALU64_REG(BPF_SUB, R0, R9),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 11 } }
- },
- { /* Mainly checking JIT here. */
- "INT: XOR",
- .u.insns_int = {
- BPF_ALU64_REG(BPF_SUB, R0, R0),
- BPF_ALU64_REG(BPF_XOR, R1, R1),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOV, R0, 10),
- BPF_ALU64_IMM(BPF_MOV, R1, -1),
- BPF_ALU64_REG(BPF_SUB, R1, R1),
- BPF_ALU64_REG(BPF_XOR, R2, R2),
- BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R2, R2),
- BPF_ALU64_REG(BPF_XOR, R3, R3),
- BPF_ALU64_IMM(BPF_MOV, R0, 10),
- BPF_ALU64_IMM(BPF_MOV, R1, -1),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R3, R3),
- BPF_ALU64_REG(BPF_XOR, R4, R4),
- BPF_ALU64_IMM(BPF_MOV, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R5, -1),
- BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R4, R4),
- BPF_ALU64_REG(BPF_XOR, R5, R5),
- BPF_ALU64_IMM(BPF_MOV, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R7, -1),
- BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOV, R5, 1),
- BPF_ALU64_REG(BPF_SUB, R5, R5),
- BPF_ALU64_REG(BPF_XOR, R6, R6),
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R8, -1),
- BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R6, R6),
- BPF_ALU64_REG(BPF_XOR, R7, R7),
- BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R7, R7),
- BPF_ALU64_REG(BPF_XOR, R8, R8),
- BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R8, R8),
- BPF_ALU64_REG(BPF_XOR, R9, R9),
- BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R9, R9),
- BPF_ALU64_REG(BPF_XOR, R0, R0),
- BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_SUB, R1, R1),
- BPF_ALU64_REG(BPF_XOR, R0, R0),
- BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- { /* Mainly checking JIT here. */
- "INT: MUL",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 11),
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R2, 2),
- BPF_ALU64_IMM(BPF_MOV, R3, 3),
- BPF_ALU64_IMM(BPF_MOV, R4, 4),
- BPF_ALU64_IMM(BPF_MOV, R5, 5),
- BPF_ALU64_IMM(BPF_MOV, R6, 6),
- BPF_ALU64_IMM(BPF_MOV, R7, 7),
- BPF_ALU64_IMM(BPF_MOV, R8, 8),
- BPF_ALU64_IMM(BPF_MOV, R9, 9),
- BPF_ALU64_REG(BPF_MUL, R0, R0),
- BPF_ALU64_REG(BPF_MUL, R0, R1),
- BPF_ALU64_REG(BPF_MUL, R0, R2),
- BPF_ALU64_REG(BPF_MUL, R0, R3),
- BPF_ALU64_REG(BPF_MUL, R0, R4),
- BPF_ALU64_REG(BPF_MUL, R0, R5),
- BPF_ALU64_REG(BPF_MUL, R0, R6),
- BPF_ALU64_REG(BPF_MUL, R0, R7),
- BPF_ALU64_REG(BPF_MUL, R0, R8),
- BPF_ALU64_REG(BPF_MUL, R0, R9),
- BPF_ALU64_IMM(BPF_MUL, R0, 10),
- BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_MUL, R1, R0),
- BPF_ALU64_REG(BPF_MUL, R1, R2),
- BPF_ALU64_REG(BPF_MUL, R1, R3),
- BPF_ALU64_REG(BPF_MUL, R1, R4),
- BPF_ALU64_REG(BPF_MUL, R1, R5),
- BPF_ALU64_REG(BPF_MUL, R1, R6),
- BPF_ALU64_REG(BPF_MUL, R1, R7),
- BPF_ALU64_REG(BPF_MUL, R1, R8),
- BPF_ALU64_REG(BPF_MUL, R1, R9),
- BPF_ALU64_IMM(BPF_MUL, R1, 10),
- BPF_ALU64_REG(BPF_MOV, R2, R1),
- BPF_ALU64_IMM(BPF_RSH, R2, 32),
- BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_LSH, R1, 32),
- BPF_ALU64_IMM(BPF_ARSH, R1, 32),
- BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_REG(BPF_MUL, R2, R0),
- BPF_ALU64_REG(BPF_MUL, R2, R1),
- BPF_ALU64_REG(BPF_MUL, R2, R3),
- BPF_ALU64_REG(BPF_MUL, R2, R4),
- BPF_ALU64_REG(BPF_MUL, R2, R5),
- BPF_ALU64_REG(BPF_MUL, R2, R6),
- BPF_ALU64_REG(BPF_MUL, R2, R7),
- BPF_ALU64_REG(BPF_MUL, R2, R8),
- BPF_ALU64_REG(BPF_MUL, R2, R9),
- BPF_ALU64_IMM(BPF_MUL, R2, 10),
- BPF_ALU64_IMM(BPF_RSH, R2, 32),
- BPF_ALU64_REG(BPF_MOV, R0, R2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x35d97ef2 } }
- },
- { /* Mainly checking JIT here. */
- "MOV REG64",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
- BPF_MOV64_REG(R1, R0),
- BPF_MOV64_REG(R2, R1),
- BPF_MOV64_REG(R3, R2),
- BPF_MOV64_REG(R4, R3),
- BPF_MOV64_REG(R5, R4),
- BPF_MOV64_REG(R6, R5),
- BPF_MOV64_REG(R7, R6),
- BPF_MOV64_REG(R8, R7),
- BPF_MOV64_REG(R9, R8),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_ALU64_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_IMM(BPF_MOV, R2, 0),
- BPF_ALU64_IMM(BPF_MOV, R3, 0),
- BPF_ALU64_IMM(BPF_MOV, R4, 0),
- BPF_ALU64_IMM(BPF_MOV, R5, 0),
- BPF_ALU64_IMM(BPF_MOV, R6, 0),
- BPF_ALU64_IMM(BPF_MOV, R7, 0),
- BPF_ALU64_IMM(BPF_MOV, R8, 0),
- BPF_ALU64_IMM(BPF_MOV, R9, 0),
- BPF_ALU64_REG(BPF_ADD, R0, R0),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_ALU64_REG(BPF_ADD, R0, R2),
- BPF_ALU64_REG(BPF_ADD, R0, R3),
- BPF_ALU64_REG(BPF_ADD, R0, R4),
- BPF_ALU64_REG(BPF_ADD, R0, R5),
- BPF_ALU64_REG(BPF_ADD, R0, R6),
- BPF_ALU64_REG(BPF_ADD, R0, R7),
- BPF_ALU64_REG(BPF_ADD, R0, R8),
- BPF_ALU64_REG(BPF_ADD, R0, R9),
- BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfefe } }
- },
- { /* Mainly checking JIT here. */
- "MOV REG32",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
- BPF_MOV64_REG(R1, R0),
- BPF_MOV64_REG(R2, R1),
- BPF_MOV64_REG(R3, R2),
- BPF_MOV64_REG(R4, R3),
- BPF_MOV64_REG(R5, R4),
- BPF_MOV64_REG(R6, R5),
- BPF_MOV64_REG(R7, R6),
- BPF_MOV64_REG(R8, R7),
- BPF_MOV64_REG(R9, R8),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU32_IMM(BPF_MOV, R2, 0),
- BPF_ALU32_IMM(BPF_MOV, R3, 0),
- BPF_ALU32_IMM(BPF_MOV, R4, 0),
- BPF_ALU32_IMM(BPF_MOV, R5, 0),
- BPF_ALU32_IMM(BPF_MOV, R6, 0),
- BPF_ALU32_IMM(BPF_MOV, R7, 0),
- BPF_ALU32_IMM(BPF_MOV, R8, 0),
- BPF_ALU32_IMM(BPF_MOV, R9, 0),
- BPF_ALU64_REG(BPF_ADD, R0, R0),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_ALU64_REG(BPF_ADD, R0, R2),
- BPF_ALU64_REG(BPF_ADD, R0, R3),
- BPF_ALU64_REG(BPF_ADD, R0, R4),
- BPF_ALU64_REG(BPF_ADD, R0, R5),
- BPF_ALU64_REG(BPF_ADD, R0, R6),
- BPF_ALU64_REG(BPF_ADD, R0, R7),
- BPF_ALU64_REG(BPF_ADD, R0, R8),
- BPF_ALU64_REG(BPF_ADD, R0, R9),
- BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfefe } }
- },
- { /* Mainly checking JIT here. */
- "LD IMM64",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
- BPF_MOV64_REG(R1, R0),
- BPF_MOV64_REG(R2, R1),
- BPF_MOV64_REG(R3, R2),
- BPF_MOV64_REG(R4, R3),
- BPF_MOV64_REG(R5, R4),
- BPF_MOV64_REG(R6, R5),
- BPF_MOV64_REG(R7, R6),
- BPF_MOV64_REG(R8, R7),
- BPF_MOV64_REG(R9, R8),
- BPF_LD_IMM64(R0, 0x0LL),
- BPF_LD_IMM64(R1, 0x0LL),
- BPF_LD_IMM64(R2, 0x0LL),
- BPF_LD_IMM64(R3, 0x0LL),
- BPF_LD_IMM64(R4, 0x0LL),
- BPF_LD_IMM64(R5, 0x0LL),
- BPF_LD_IMM64(R6, 0x0LL),
- BPF_LD_IMM64(R7, 0x0LL),
- BPF_LD_IMM64(R8, 0x0LL),
- BPF_LD_IMM64(R9, 0x0LL),
- BPF_ALU64_REG(BPF_ADD, R0, R0),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_ALU64_REG(BPF_ADD, R0, R2),
- BPF_ALU64_REG(BPF_ADD, R0, R3),
- BPF_ALU64_REG(BPF_ADD, R0, R4),
- BPF_ALU64_REG(BPF_ADD, R0, R5),
- BPF_ALU64_REG(BPF_ADD, R0, R6),
- BPF_ALU64_REG(BPF_ADD, R0, R7),
- BPF_ALU64_REG(BPF_ADD, R0, R8),
- BPF_ALU64_REG(BPF_ADD, R0, R9),
- BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfefe } }
- },
- {
- "INT: ALU MIX",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 11),
- BPF_ALU64_IMM(BPF_ADD, R0, -1),
- BPF_ALU64_IMM(BPF_MOV, R2, 2),
- BPF_ALU64_IMM(BPF_XOR, R2, 3),
- BPF_ALU64_REG(BPF_DIV, R0, R2),
- BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOD, R0, 3),
- BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- {
- "INT: shifts by register",
- .u.insns_int = {
- BPF_MOV64_IMM(R0, -1234),
- BPF_MOV64_IMM(R1, 1),
- BPF_ALU32_REG(BPF_RSH, R0, R1),
- BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
- BPF_EXIT_INSN(),
- BPF_MOV64_IMM(R2, 1),
- BPF_ALU64_REG(BPF_LSH, R0, R2),
- BPF_MOV32_IMM(R4, -1234),
- BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
- BPF_EXIT_INSN(),
- BPF_ALU64_IMM(BPF_AND, R4, 63),
- BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
- BPF_MOV64_IMM(R3, 47),
- BPF_ALU64_REG(BPF_ARSH, R0, R3),
- BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
- BPF_EXIT_INSN(),
- BPF_MOV64_IMM(R2, 1),
- BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
- BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
- BPF_EXIT_INSN(),
- BPF_MOV64_IMM(R4, 4),
- BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
- BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
- BPF_EXIT_INSN(),
- BPF_MOV64_IMM(R4, 5),
- BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
- BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
- BPF_EXIT_INSN(),
- BPF_MOV64_IMM(R0, -1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- #ifdef CONFIG_32BIT
- {
- "INT: 32-bit context pointer word order and zero-extension",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- #endif
- {
- "check: missing ret",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 1),
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- {
- "check: div_k_0",
- .u.insns = {
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
- BPF_STMT(BPF_RET | BPF_K, 0)
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- {
- "check: unknown insn",
- .u.insns = {
- /* seccomp insn, rejected in socket filter */
- BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
- BPF_STMT(BPF_RET | BPF_K, 0)
- },
- CLASSIC | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- {
- "check: out of range spill/fill",
- .u.insns = {
- BPF_STMT(BPF_STX, 16),
- BPF_STMT(BPF_RET | BPF_K, 0)
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- {
- "JUMPS + HOLES",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
- BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
- BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
- BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
- BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
- BPF_STMT(BPF_RET | BPF_A, 0),
- BPF_STMT(BPF_RET | BPF_A, 0),
- },
- CLASSIC,
- { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
- 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
- 0x08, 0x00,
- 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
- 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
- 0xc0, 0xa8, 0x33, 0x01,
- 0xc0, 0xa8, 0x33, 0x02,
- 0xbb, 0xb6,
- 0xa9, 0xfa,
- 0x00, 0x14, 0x00, 0x00,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
- 0xcc, 0xcc, 0xcc, 0xcc },
- { { 88, 0x001b } }
- },
- {
- "check: RET X",
- .u.insns = {
- BPF_STMT(BPF_RET | BPF_X, 0),
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- {
- "check: LDX + RET X",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 42),
- BPF_STMT(BPF_RET | BPF_X, 0),
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- { /* Mainly checking JIT here. */
- "M[]: alt STX + LDX",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 100),
- BPF_STMT(BPF_STX, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 0),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 1),
- BPF_STMT(BPF_LDX | BPF_MEM, 1),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 2),
- BPF_STMT(BPF_LDX | BPF_MEM, 2),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 3),
- BPF_STMT(BPF_LDX | BPF_MEM, 3),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 4),
- BPF_STMT(BPF_LDX | BPF_MEM, 4),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 5),
- BPF_STMT(BPF_LDX | BPF_MEM, 5),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 6),
- BPF_STMT(BPF_LDX | BPF_MEM, 6),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 7),
- BPF_STMT(BPF_LDX | BPF_MEM, 7),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 8),
- BPF_STMT(BPF_LDX | BPF_MEM, 8),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 9),
- BPF_STMT(BPF_LDX | BPF_MEM, 9),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 10),
- BPF_STMT(BPF_LDX | BPF_MEM, 10),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 11),
- BPF_STMT(BPF_LDX | BPF_MEM, 11),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 12),
- BPF_STMT(BPF_LDX | BPF_MEM, 12),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 13),
- BPF_STMT(BPF_LDX | BPF_MEM, 13),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 14),
- BPF_STMT(BPF_LDX | BPF_MEM, 14),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_STX, 15),
- BPF_STMT(BPF_LDX | BPF_MEM, 15),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_RET | BPF_A, 0),
- },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 116 } },
- },
- { /* Mainly checking JIT here. */
- "M[]: full STX + full LDX",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
- BPF_STMT(BPF_STX, 0),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
- BPF_STMT(BPF_STX, 1),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
- BPF_STMT(BPF_STX, 2),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
- BPF_STMT(BPF_STX, 3),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
- BPF_STMT(BPF_STX, 4),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
- BPF_STMT(BPF_STX, 5),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
- BPF_STMT(BPF_STX, 6),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
- BPF_STMT(BPF_STX, 7),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
- BPF_STMT(BPF_STX, 8),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
- BPF_STMT(BPF_STX, 9),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
- BPF_STMT(BPF_STX, 10),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
- BPF_STMT(BPF_STX, 11),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
- BPF_STMT(BPF_STX, 12),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
- BPF_STMT(BPF_STX, 13),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
- BPF_STMT(BPF_STX, 14),
- BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
- BPF_STMT(BPF_STX, 15),
- BPF_STMT(BPF_LDX | BPF_MEM, 0),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 1),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 2),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 3),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 4),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 5),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 6),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 7),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 8),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 9),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 10),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 11),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 12),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 13),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 14),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_LDX | BPF_MEM, 15),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0),
- },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0x2a5a5e5 } },
- },
- {
- "check: SKF_AD_MAX",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF + SKF_AD_MAX),
- BPF_STMT(BPF_RET | BPF_A, 0),
- },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = NULL,
- .expected_errcode = -EINVAL,
- },
- { /* Passes checker but fails during runtime. */
- "LD [SKF_AD_OFF-1]",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
- SKF_AD_OFF - 1),
- BPF_STMT(BPF_RET | BPF_K, 1),
- },
- CLASSIC,
- { },
- { { 1, 0 } },
- },
- {
- "load 64-bit immediate",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x567800001234LL),
- BPF_MOV64_REG(R2, R1),
- BPF_MOV64_REG(R3, R2),
- BPF_ALU64_IMM(BPF_RSH, R2, 32),
- BPF_ALU64_IMM(BPF_LSH, R3, 32),
- BPF_ALU64_IMM(BPF_RSH, R3, 32),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
- BPF_EXIT_INSN(),
- BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
- BPF_EXIT_INSN(),
- BPF_LD_IMM64(R0, 0x1ffffffffLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- /* BPF_ALU | BPF_MOV | BPF_X */
- {
- "ALU_MOV_X: dst = 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_MOV_X: dst = 4294967295",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
- BPF_ALU32_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- {
- "ALU64_MOV_X: dst = 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_MOV_X: dst = 4294967295",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- /* BPF_ALU | BPF_MOV | BPF_K */
- {
- "ALU_MOV_K: dst = 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_MOV_K: dst = 4294967295",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- {
- "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
- BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_MOV_K: small negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "ALU_MOV_K: small negative zero extension",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU_MOV_K: large negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123456789 } }
- },
- {
- "ALU_MOV_K: large negative zero extension",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_MOV_K: dst = 2",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_MOV_K: dst = 2147483647",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2147483647 } },
- },
- {
- "ALU64_OR_K: dst = 0x0",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x0),
- BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_MOV_K: dst = -1",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_MOV_K: small negative",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, -123),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "ALU64_MOV_K: small negative sign extension",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, -123),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } }
- },
- {
- "ALU64_MOV_K: large negative",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123456789 } }
- },
- {
- "ALU64_MOV_K: large negative sign extension",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } }
- },
- /* BPF_ALU | BPF_ADD | BPF_X */
- {
- "ALU_ADD_X: 1 + 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_ADD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_ADD_X: 1 + 4294967294 = 4294967295",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
- BPF_ALU32_REG(BPF_ADD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- {
- "ALU_ADD_X: 2 + 4294967294 = 0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_LD_IMM64(R1, 4294967294U),
- BPF_ALU32_REG(BPF_ADD, R0, R1),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_ADD_X: 1 + 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- {
- "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_LD_IMM64(R1, 4294967294U),
- BPF_LD_IMM64(R2, 4294967296ULL),
- BPF_ALU64_REG(BPF_ADD, R0, R1),
- BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
- BPF_MOV32_IMM(R0, 0),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_ALU | BPF_ADD | BPF_K */
- {
- "ALU_ADD_K: 1 + 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_ADD_K: 3 + 0 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_ADD, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_ADD_K: 1 + 4294967294 = 4294967295",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4294967295U } },
- },
- {
- "ALU_ADD_K: 4294967294 + 2 = 0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967294U),
- BPF_ALU32_IMM(BPF_ADD, R0, 2),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0x00000000ffffffff),
- BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_ADD_K: 0 + 0xffff = 0xffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0xffff),
- BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0x7fffffff),
- BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0x80000000),
- BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0x80008000),
- BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_ADD_K: 1 + 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_ADD, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_ADD_K: 3 + 0 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_ADD, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2147483647 } },
- },
- {
- "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967294U),
- BPF_LD_IMM64(R1, 4294967296ULL),
- BPF_ALU64_IMM(BPF_ADD, R0, 2),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483646),
- BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } },
- },
- {
- "ALU64_ADD_K: 1 + 0 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x1),
- BPF_LD_IMM64(R3, 0x1),
- BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_ADD_K: 0 + 0xffff = 0xffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0xffff),
- BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0x7fffffff),
- BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0xffffffff80000000LL),
- BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0),
- BPF_LD_IMM64(R3, 0xffffffff80008000LL),
- BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- /* BPF_ALU | BPF_SUB | BPF_X */
- {
- "ALU_SUB_X: 3 - 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU32_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_SUB_X: 4294967295 - 4294967294 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
- BPF_ALU32_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_SUB_X: 3 - 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_ALU | BPF_SUB | BPF_K */
- {
- "ALU_SUB_K: 3 - 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_SUB, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_SUB_K: 3 - 0 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_SUB, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_SUB_K: 4294967295 - 4294967294 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_SUB_K: 3 - 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_SUB, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_SUB_K: 3 - 0 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_SUB, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967294U),
- BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } },
- },
- {
- "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483646),
- BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } },
- },
- /* BPF_ALU | BPF_MUL | BPF_X */
- {
- "ALU_MUL_X: 2 * 3 = 6",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 3),
- BPF_ALU32_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 6 } },
- },
- {
- "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
- BPF_ALU32_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xFFFFFFF0 } },
- },
- {
- "ALU_MUL_X: -1 * -1 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, -1),
- BPF_ALU32_IMM(BPF_MOV, R1, -1),
- BPF_ALU32_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_MUL_X: 2 * 3 = 6",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 3),
- BPF_ALU64_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 6 } },
- },
- {
- "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
- BPF_ALU64_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2147483647 } },
- },
- {
- "ALU64_MUL_X: 64x64 multiply, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
- BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
- BPF_ALU64_REG(BPF_MUL, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xe5618cf0 } }
- },
- {
- "ALU64_MUL_X: 64x64 multiply, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
- BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
- BPF_ALU64_REG(BPF_MUL, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x2236d88f } }
- },
- /* BPF_ALU | BPF_MUL | BPF_K */
- {
- "ALU_MUL_K: 2 * 3 = 6",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MUL, R0, 3),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 6 } },
- },
- {
- "ALU_MUL_K: 3 * 1 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MUL, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xFFFFFFF0 } },
- },
- {
- "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x1),
- BPF_LD_IMM64(R3, 0x00000000ffffffff),
- BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_MUL_K: 2 * 3 = 6",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU64_IMM(BPF_MUL, R0, 3),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 6 } },
- },
- {
- "ALU64_MUL_K: 3 * 1 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_MUL, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2147483647 } },
- },
- {
- "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -2147483647 } },
- },
- {
- "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x1),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_MUL_K: 64x32 multiply, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xe242d208 } }
- },
- {
- "ALU64_MUL_K: 64x32 multiply, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xc28f5c28 } }
- },
- /* BPF_ALU | BPF_DIV | BPF_X */
- {
- "ALU_DIV_X: 6 / 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 6),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_DIV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_DIV_X: 4294967295 / 4294967295 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
- BPF_ALU32_REG(BPF_DIV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_DIV_X: 6 / 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 6),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_DIV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483647),
- BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
- BPF_ALU64_REG(BPF_DIV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
- BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
- BPF_LD_IMM64(R3, 0x0000000000000001LL),
- BPF_ALU64_REG(BPF_DIV, R2, R4),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- /* BPF_ALU | BPF_DIV | BPF_K */
- {
- "ALU_DIV_K: 6 / 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 6),
- BPF_ALU32_IMM(BPF_DIV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_DIV_K: 3 / 1 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_DIV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_DIV_K: 4294967295 / 4294967295 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
- BPF_LD_IMM64(R3, 0x1UL),
- BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_DIV_K: 6 / 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 6),
- BPF_ALU64_IMM(BPF_DIV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_DIV_K: 3 / 1 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_DIV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483647),
- BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
- BPF_LD_IMM64(R3, 0x0000000000000001LL),
- BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- /* BPF_ALU | BPF_MOD | BPF_X */
- {
- "ALU_MOD_X: 3 % 2 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_MOD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_MOD_X: 4294967295 % 4294967293 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
- BPF_ALU32_REG(BPF_MOD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_MOD_X: 3 % 2 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_MOD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483647),
- BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
- BPF_ALU64_REG(BPF_MOD, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- /* BPF_ALU | BPF_MOD | BPF_K */
- {
- "ALU_MOD_K: 3 % 2 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOD, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_MOD_K: 3 % 1 = 0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOD, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- },
- {
- "ALU_MOD_K: 4294967295 % 4294967293 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 4294967295U),
- BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_MOD_K: 3 % 2 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_MOD, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_MOD_K: 3 % 1 = 0",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_MOD, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- },
- {
- "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2147483647),
- BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- /* BPF_ALU | BPF_AND | BPF_X */
- {
- "ALU_AND_X: 3 & 2 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_AND, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffff),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU32_REG(BPF_AND, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU64_AND_X: 3 & 2 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_AND, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffff),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU64_REG(BPF_AND, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- /* BPF_ALU | BPF_AND | BPF_K */
- {
- "ALU_AND_K: 3 & 2 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU32_IMM(BPF_AND, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffff),
- BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU_AND_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
- BPF_ALU32_IMM(BPF_AND, R0, 15),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 4 } }
- },
- {
- "ALU_AND_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
- BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xa1b2c3d4 } }
- },
- {
- "ALU_AND_K: Zero extension",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
- BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_AND_K: 3 & 2 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_AND, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xffffffff),
- BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x0000000000000000LL),
- BPF_ALU64_IMM(BPF_AND, R2, 0x0),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
- BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_AND_K: Sign extension 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
- BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_AND_K: Sign extension 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
- BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- /* BPF_ALU | BPF_OR | BPF_X */
- {
- "ALU_OR_X: 1 | 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU32_REG(BPF_OR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU32_REG(BPF_OR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU64_OR_X: 1 | 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 2),
- BPF_ALU64_REG(BPF_OR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU64_REG(BPF_OR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- /* BPF_ALU | BPF_OR | BPF_K */
- {
- "ALU_OR_K: 1 | 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_OR, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU_OR_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
- BPF_ALU32_IMM(BPF_OR, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x01020305 } }
- },
- {
- "ALU_OR_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
- BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xa1b2c3d4 } }
- },
- {
- "ALU_OR_K: Zero extension",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
- BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_OR_K: 1 | 2 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_OR, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- },
- {
- "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
- BPF_ALU64_IMM(BPF_OR, R2, 0x0),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000000000000000LL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_OR_K: Sign extension 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
- BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_OR_K: Sign extension 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
- BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- /* BPF_ALU | BPF_XOR | BPF_X */
- {
- "ALU_XOR_X: 5 ^ 6 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 5),
- BPF_ALU32_IMM(BPF_MOV, R1, 6),
- BPF_ALU32_REG(BPF_XOR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU32_REG(BPF_XOR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } },
- },
- {
- "ALU64_XOR_X: 5 ^ 6 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 5),
- BPF_ALU32_IMM(BPF_MOV, R1, 6),
- BPF_ALU64_REG(BPF_XOR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_ALU64_REG(BPF_XOR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } },
- },
- /* BPF_ALU | BPF_XOR | BPF_K */
- {
- "ALU_XOR_K: 5 ^ 6 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 5),
- BPF_ALU32_IMM(BPF_XOR, R0, 6),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } },
- },
- {
- "ALU_XOR_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
- BPF_ALU32_IMM(BPF_XOR, R0, 15),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x0102030b } }
- },
- {
- "ALU_XOR_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
- BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x5e4d3c2b } }
- },
- {
- "ALU_XOR_K: Zero extension",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
- BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_XOR_K: 5 ^ 6 = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 5),
- BPF_ALU64_IMM(BPF_XOR, R0, 6),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } },
- },
- {
- "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
- BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
- BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
- BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x0000000000000000LL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- },
- {
- "ALU64_XOR_K: Sign extension 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
- BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "ALU64_XOR_K: Sign extension 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
- BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
- BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- /* BPF_ALU | BPF_LSH | BPF_X */
- {
- "ALU_LSH_X: 1 << 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU32_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_LSH_X: 1 << 31 = 0x80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 31),
- BPF_ALU32_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x80000000 } },
- },
- {
- "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU32_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x45678000 } }
- },
- {
- "ALU64_LSH_X: 1 << 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_LSH_X: 1 << 31 = 0x80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_MOV, R1, 31),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x80000000 } },
- },
- {
- "ALU64_LSH_X: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xbcdef000 } }
- },
- {
- "ALU64_LSH_X: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x3456789a } }
- },
- {
- "ALU64_LSH_X: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_LSH_X: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x9abcdef0 } }
- },
- {
- "ALU64_LSH_X: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_LSH_X: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- {
- "ALU64_LSH_X: Zero shift, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- {
- "ALU64_LSH_X: Zero shift, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_LSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x01234567 } }
- },
- /* BPF_ALU | BPF_LSH | BPF_K */
- {
- "ALU_LSH_K: 1 << 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_LSH, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU_LSH_K: 1 << 31 = 0x80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU32_IMM(BPF_LSH, R0, 31),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x80000000 } },
- },
- {
- "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_LSH, R0, 12),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x45678000 } }
- },
- {
- "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_LSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x12345678 } }
- },
- {
- "ALU64_LSH_K: 1 << 1 = 2",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_LSH, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "ALU64_LSH_K: 1 << 31 = 0x80000000",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 1),
- BPF_ALU64_IMM(BPF_LSH, R0, 31),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x80000000 } },
- },
- {
- "ALU64_LSH_K: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 12),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xbcdef000 } }
- },
- {
- "ALU64_LSH_K: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 12),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x3456789a } }
- },
- {
- "ALU64_LSH_K: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 36),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_LSH_K: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 36),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x9abcdef0 } }
- },
- {
- "ALU64_LSH_K: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_LSH_K: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 32),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- {
- "ALU64_LSH_K: Zero shift",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_LSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- /* BPF_ALU | BPF_RSH | BPF_X */
- {
- "ALU_RSH_X: 2 >> 1 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU32_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_RSH_X: 0x80000000 >> 31 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x80000000),
- BPF_ALU32_IMM(BPF_MOV, R1, 31),
- BPF_ALU32_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, 20),
- BPF_ALU32_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x123 } }
- },
- {
- "ALU64_RSH_X: 2 >> 1 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_RSH_X: 0x80000000 >> 31 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x80000000),
- BPF_ALU32_IMM(BPF_MOV, R1, 31),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_RSH_X: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x56789abc } }
- },
- {
- "ALU64_RSH_X: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x00081234 } }
- },
- {
- "ALU64_RSH_X: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x08123456 } }
- },
- {
- "ALU64_RSH_X: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_RSH_X: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- {
- "ALU64_RSH_X: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_RSH_X: Zero shift, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- {
- "ALU64_RSH_X: Zero shift, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_RSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- /* BPF_ALU | BPF_RSH | BPF_K */
- {
- "ALU_RSH_K: 2 >> 1 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU32_IMM(BPF_RSH, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_RSH_K: 0x80000000 >> 31 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x80000000),
- BPF_ALU32_IMM(BPF_RSH, R0, 31),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_RSH, R0, 20),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x123 } }
- },
- {
- "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
- BPF_ALU32_IMM(BPF_RSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x12345678 } }
- },
- {
- "ALU64_RSH_K: 2 >> 1 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 2),
- BPF_ALU64_IMM(BPF_RSH, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_RSH_K: 0x80000000 >> 31 = 1",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x80000000),
- BPF_ALU64_IMM(BPF_RSH, R0, 31),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "ALU64_RSH_K: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 12),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x56789abc } }
- },
- {
- "ALU64_RSH_K: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 12),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x00081234 } }
- },
- {
- "ALU64_RSH_K: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 36),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x08123456 } }
- },
- {
- "ALU64_RSH_K: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 36),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_RSH_K: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- {
- "ALU64_RSH_K: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } }
- },
- {
- "ALU64_RSH_K: Zero shift",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ALU64_IMM(BPF_RSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- /* BPF_ALU | BPF_ARSH | BPF_X */
- {
- "ALU32_ARSH_X: -1234 >> 7 = -10",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -1234),
- BPF_ALU32_IMM(BPF_MOV, R1, 7),
- BPF_ALU32_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -10 } }
- },
- {
- "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
- BPF_ALU32_IMM(BPF_MOV, R1, 40),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffff00ff } },
- },
- {
- "ALU64_ARSH_X: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x56789abc } }
- },
- {
- "ALU64_ARSH_X: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 12),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfff81234 } }
- },
- {
- "ALU64_ARSH_X: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xf8123456 } }
- },
- {
- "ALU64_ARSH_X: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 36),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- {
- "ALU64_ARSH_X: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- {
- "ALU64_ARSH_X: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 32),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- {
- "ALU64_ARSH_X: Zero shift, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- {
- "ALU64_ARSH_X: Zero shift, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU32_IMM(BPF_MOV, R1, 0),
- BPF_ALU64_REG(BPF_ARSH, R0, R1),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- /* BPF_ALU | BPF_ARSH | BPF_K */
- {
- "ALU32_ARSH_K: -1234 >> 7 = -10",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -1234),
- BPF_ALU32_IMM(BPF_ARSH, R0, 7),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -10 } }
- },
- {
- "ALU32_ARSH_K: -1234 >> 0 = -1234",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -1234),
- BPF_ALU32_IMM(BPF_ARSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1234 } }
- },
- {
- "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffff00ff } },
- },
- {
- "ALU64_ARSH_K: Shift < 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 12),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x56789abc } }
- },
- {
- "ALU64_ARSH_K: Shift < 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 12),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfff81234 } }
- },
- {
- "ALU64_ARSH_K: Shift > 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 36),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xf8123456 } }
- },
- {
- "ALU64_ARSH_K: Shift > 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 36),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- {
- "ALU64_ARSH_K: Shift == 32, low word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x81234567 } }
- },
- {
- "ALU64_ARSH_K: Shift == 32, high word",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 32),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -1 } }
- },
- {
- "ALU64_ARSH_K: Zero shift",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
- BPF_ALU64_IMM(BPF_ARSH, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } }
- },
- /* BPF_ALU | BPF_NEG */
- {
- "ALU_NEG: -(3) = -3",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 3),
- BPF_ALU32_IMM(BPF_NEG, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -3 } },
- },
- {
- "ALU_NEG: -(-3) = 3",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -3),
- BPF_ALU32_IMM(BPF_NEG, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- {
- "ALU64_NEG: -(3) = -3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 3),
- BPF_ALU64_IMM(BPF_NEG, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -3 } },
- },
- {
- "ALU64_NEG: -(-3) = 3",
- .u.insns_int = {
- BPF_LD_IMM64(R0, -3),
- BPF_ALU64_IMM(BPF_NEG, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 3 } },
- },
- /* BPF_ALU | BPF_END | BPF_FROM_BE */
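- /*
- * The expected values below are wrapped in cpu_to_be*()/cpu_to_le*(),
- * so the same table entries hold on little- and big-endian hosts; the
- * test names quote the result as it appears on a big-endian machine.
- */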
- {
- "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 16),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_be16(0xcdef) } },
- },
- {
- "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 32),
- BPF_ALU64_REG(BPF_MOV, R1, R0),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_be32(0x89abcdef) } },
- },
- {
- "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 64),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
- },
- {
- "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 64),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
- },
- /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
- {
- "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 16),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_be16(0x3210) } },
- },
- {
- "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 32),
- BPF_ALU64_REG(BPF_MOV, R1, R0),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_be32(0x76543210) } },
- },
- {
- "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 64),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
- },
- {
- "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_BE, R0, 64),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
- },
- /* BPF_ALU | BPF_END | BPF_FROM_LE */
- {
- "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 16),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_le16(0xcdef) } },
- },
- {
- "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 32),
- BPF_ALU64_REG(BPF_MOV, R1, R0),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_le32(0x89abcdef) } },
- },
- {
- "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 64),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
- },
- {
- "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 64),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
- },
- /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
- {
- "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 16),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_le16(0x3210) } },
- },
- {
- "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 32),
- BPF_ALU64_REG(BPF_MOV, R1, R0),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, cpu_to_le32(0x76543210) } },
- },
- {
- "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 64),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
- },
- {
- "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_ENDIAN(BPF_FROM_LE, R0, 64),
- BPF_ALU64_IMM(BPF_RSH, R0, 32),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
- },
- /* BPF_LDX_MEM B/H/W/DW */
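- /*
- * The "base" and "MSB set" variants below load through the stack (R10)
- * and use an #ifdef __BIG_ENDIAN offset to address the least
- * significant part of the stored doubleword. The FLAG_LARGE_MEM
- * variants access memory through R1, the test data pointer; the first
- * number in the expected-result tuple (e.g. { { 512, 0 } }) is the
- * size of the data buffer those tests need.
- */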
- {
- "BPF_LDX_MEM | BPF_B, base",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0102030405060708ULL),
- BPF_LD_IMM64(R2, 0x0000000000000008ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
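- /* Load the least significant byte of the stored doubleword: it
- * sits at the highest stack offset on big-endian hosts and at the
- * lowest offset on little-endian hosts.
- */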
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_B, R0, R10, -1),
- #else
- BPF_LDX_MEM(BPF_B, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_B, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8182838485868788ULL),
- BPF_LD_IMM64(R2, 0x0000000000000088ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_B, R0, R10, -1),
- #else
- BPF_LDX_MEM(BPF_B, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_B, negative offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000000088ULL),
- BPF_ALU64_IMM(BPF_ADD, R1, 512),
- BPF_STX_MEM(BPF_B, R1, R2, -256),
- BPF_LDX_MEM(BPF_B, R0, R1, -256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_B, small positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000000088ULL),
- BPF_STX_MEM(BPF_B, R1, R2, 256),
- BPF_LDX_MEM(BPF_B, R0, R1, 256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_B, large positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000000088ULL),
- BPF_STX_MEM(BPF_B, R1, R2, 4096),
- BPF_LDX_MEM(BPF_B, R0, R1, 4096),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 4096 + 16, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_H, base",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0102030405060708ULL),
- BPF_LD_IMM64(R2, 0x0000000000000708ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_H, R0, R10, -2),
- #else
- BPF_LDX_MEM(BPF_H, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_H, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8182838485868788ULL),
- BPF_LD_IMM64(R2, 0x0000000000008788ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_H, R0, R10, -2),
- #else
- BPF_LDX_MEM(BPF_H, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_H, negative offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000008788ULL),
- BPF_ALU64_IMM(BPF_ADD, R1, 512),
- BPF_STX_MEM(BPF_H, R1, R2, -256),
- BPF_LDX_MEM(BPF_H, R0, R1, -256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_H, small positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000008788ULL),
- BPF_STX_MEM(BPF_H, R1, R2, 256),
- BPF_LDX_MEM(BPF_H, R0, R1, 256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_H, large positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000008788ULL),
- BPF_STX_MEM(BPF_H, R1, R2, 8192),
- BPF_LDX_MEM(BPF_H, R0, R1, 8192),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 8192 + 16, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_H, unaligned positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000000008788ULL),
- BPF_STX_MEM(BPF_H, R1, R2, 13),
- BPF_LDX_MEM(BPF_H, R0, R1, 13),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 32, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_W, base",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0102030405060708ULL),
- BPF_LD_IMM64(R2, 0x0000000005060708ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_W, R0, R10, -4),
- #else
- BPF_LDX_MEM(BPF_W, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_W, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8182838485868788ULL),
- BPF_LD_IMM64(R2, 0x0000000085868788ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_LDX_MEM(BPF_W, R0, R10, -4),
- #else
- BPF_LDX_MEM(BPF_W, R0, R10, -8),
- #endif
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_W, negative offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000085868788ULL),
- BPF_ALU64_IMM(BPF_ADD, R1, 512),
- BPF_STX_MEM(BPF_W, R1, R2, -256),
- BPF_LDX_MEM(BPF_W, R0, R1, -256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_W, small positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000085868788ULL),
- BPF_STX_MEM(BPF_W, R1, R2, 256),
- BPF_LDX_MEM(BPF_W, R0, R1, 256),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_W, large positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000085868788ULL),
- BPF_STX_MEM(BPF_W, R1, R2, 16384),
- BPF_LDX_MEM(BPF_W, R0, R1, 16384),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 16384 + 16, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_W, unaligned positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x0000000085868788ULL),
- BPF_STX_MEM(BPF_W, R1, R2, 13),
- BPF_LDX_MEM(BPF_W, R0, R1, 13),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 32, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_DW, base",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0102030405060708ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_DW, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8182838485868788ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_DW, negative offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_ALU64_IMM(BPF_ADD, R1, 512),
- BPF_STX_MEM(BPF_DW, R1, R2, -256),
- BPF_LDX_MEM(BPF_DW, R0, R1, -256),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_DW, small positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_STX_MEM(BPF_DW, R1, R2, 256),
- BPF_LDX_MEM(BPF_DW, R0, R1, 256),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 512, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_LDX_MEM | BPF_DW, large positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_STX_MEM(BPF_DW, R1, R2, 32760),
- BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 32768, 0 } },
- .stack_depth = 0,
- },
- {
- "BPF_LDX_MEM | BPF_DW, unaligned positive offset",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_STX_MEM(BPF_DW, R1, R2, 13),
- BPF_LDX_MEM(BPF_DW, R0, R1, 13),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_LARGE_MEM,
- { },
- { { 32, 0 } },
- .stack_depth = 0,
- },
- /* BPF_STX_MEM B/H/W/DW */
- {
- "BPF_STX_MEM | BPF_B",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x0102030405060708ULL),
- BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_B, R10, R2, -1),
- #else
- BPF_STX_MEM(BPF_B, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_STX_MEM | BPF_B, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_B, R10, R2, -1),
- #else
- BPF_STX_MEM(BPF_B, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_STX_MEM | BPF_H",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x0102030405060708ULL),
- BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_H, R10, R2, -2),
- #else
- BPF_STX_MEM(BPF_H, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_STX_MEM | BPF_H, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_H, R10, R2, -2),
- #else
- BPF_STX_MEM(BPF_H, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_STX_MEM | BPF_W",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x0102030405060708ULL),
- BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_W, R10, R2, -4),
- #else
- BPF_STX_MEM(BPF_W, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- {
- "BPF_STX_MEM | BPF_W, MSB set",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
- BPF_LD_IMM64(R2, 0x8182838485868788ULL),
- BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- #ifdef __BIG_ENDIAN
- BPF_STX_MEM(BPF_W, R10, R2, -4),
- #else
- BPF_STX_MEM(BPF_W, R10, R2, -8),
- #endif
- BPF_LDX_MEM(BPF_DW, R0, R10, -8),
- BPF_JMP_REG(BPF_JNE, R0, R3, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
- {
- "ST_MEM_B: Store/Load byte: max negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_B, R10, -40, 0xff),
- BPF_LDX_MEM(BPF_B, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_B: Store/Load byte: max positive",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
- BPF_LDX_MEM(BPF_B, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x7f } },
- .stack_depth = 40,
- },
- {
- "STX_MEM_B: Store/Load byte: max negative",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0xffLL),
- BPF_STX_MEM(BPF_B, R10, R1, -40),
- BPF_LDX_MEM(BPF_B, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_H: Store/Load half word: max negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
- BPF_LDX_MEM(BPF_H, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_H: Store/Load half word: max positive",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
- BPF_LDX_MEM(BPF_H, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x7fff } },
- .stack_depth = 40,
- },
- {
- "STX_MEM_H: Store/Load half word: max negative",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0xffffLL),
- BPF_STX_MEM(BPF_H, R10, R1, -40),
- BPF_LDX_MEM(BPF_H, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_W: Store/Load word: max negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_W: Store/Load word: max positive",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x7fffffff } },
- .stack_depth = 40,
- },
- {
- "STX_MEM_W: Store/Load word: max negative",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffLL),
- BPF_STX_MEM(BPF_W, R10, R1, -40),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_DW: Store/Load double word: max negative",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
- BPF_LDX_MEM(BPF_DW, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_DW: Store/Load double word: max negative 2",
- .u.insns_int = {
- BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
- BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
- BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
- BPF_LDX_MEM(BPF_DW, R2, R10, -40),
- BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
- BPF_MOV32_IMM(R0, 2),
- BPF_EXIT_INSN(),
- BPF_MOV32_IMM(R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x1 } },
- .stack_depth = 40,
- },
- {
- "ST_MEM_DW: Store/Load double word: max positive",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
- BPF_LDX_MEM(BPF_DW, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x7fffffff } },
- .stack_depth = 40,
- },
- {
- "STX_MEM_DW: Store/Load double word: max negative",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_LDX_MEM(BPF_DW, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffff } },
- .stack_depth = 40,
- },
- {
- "STX_MEM_DW: Store double word: first word in memory",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- #ifdef __BIG_ENDIAN
- { { 0, 0x01234567 } },
- #else
- { { 0, 0x89abcdef } },
- #endif
- .stack_depth = 40,
- },
- {
- "STX_MEM_DW: Store double word: second word in memory",
- .u.insns_int = {
- BPF_LD_IMM64(R0, 0),
- BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_LDX_MEM(BPF_W, R0, R10, -36),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- #ifdef __BIG_ENDIAN
- { { 0, 0x89abcdef } },
- #else
- { { 0, 0x01234567 } },
- #endif
- .stack_depth = 40,
- },
- /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
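- /*
- * The two entries below carry no inline instructions; their programs
- * are generated at test setup by the .fill_helper callbacks which, as
- * the test names suggest, emit a long run of atomic +1 additions on a
- * stack word and load the final counter value for checking.
- */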
- {
- "STX_XADD_W: X + 1 + 1 + 1 + ...",
- { },
- INTERNAL,
- { },
- { { 0, 4134 } },
- .fill_helper = bpf_fill_stxw,
- },
- {
- "STX_XADD_DW: X + 1 + 1 + 1 + ...",
- { },
- INTERNAL,
- { },
- { { 0, 4134 } },
- .fill_helper = bpf_fill_stxdw,
- },
- /*
- * Exhaustive tests of atomic operation variants.
- * Individual tests are expanded from template macros for all
- * combinations of ALU operation, word size and fetching.
- */
- #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
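- /*
- * For BPF_W tests the poison places a garbage pattern in the upper half
- * of the 64-bit source operand; a correct 32-bit atomic operation must
- * ignore it. Each test also folds the upper half of the checked
- * register into R0 (MOV/RSH 32/OR), so stray high bits corrupt the
- * 32-bit result and the test fails.
- */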
- #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result) \
- { \
- "BPF_ATOMIC | " #width ", " #op ": Test: " \
- #old " " #logic " " #update " = " #result, \
- .u.insns_int = { \
- BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)), \
- BPF_ST_MEM(width, R10, -40, old), \
- BPF_ATOMIC_OP(width, op, R10, R5, -40), \
- BPF_LDX_MEM(width, R0, R10, -40), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, result } }, \
- .stack_depth = 40, \
- }
- #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
- { \
- "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
- #old " " #logic " " #update " = " #result, \
- .u.insns_int = { \
- BPF_ALU64_REG(BPF_MOV, R1, R10), \
- BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
- BPF_ST_MEM(width, R10, -40, old), \
- BPF_ATOMIC_OP(width, op, R10, R0, -40), \
- BPF_ALU64_REG(BPF_MOV, R0, R10), \
- BPF_ALU64_REG(BPF_SUB, R0, R1), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- .stack_depth = 40, \
- }
- #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
- { \
- "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
- #old " " #logic " " #update " = " #result, \
- .u.insns_int = { \
- BPF_ALU64_REG(BPF_MOV, R0, R10), \
- BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
- BPF_ST_MEM(width, R10, -40, old), \
- BPF_ATOMIC_OP(width, op, R10, R1, -40), \
- BPF_ALU64_REG(BPF_SUB, R0, R10), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- .stack_depth = 40, \
- }
- #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
- { \
- "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
- #old " " #logic " " #update " = " #result, \
- .u.insns_int = { \
- BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)), \
- BPF_ST_MEM(width, R10, -40, old), \
- BPF_ATOMIC_OP(width, op, R10, R3, -40), \
- BPF_ALU32_REG(BPF_MOV, R0, R3), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, (op) & BPF_FETCH ? old : update } }, \
- .stack_depth = 40, \
- }
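- /*
- * For illustration only (not an additional table entry), an invocation
- * such as BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd)
- * expands to roughly:
- *
- *	{
- *		"BPF_ATOMIC | BPF_W, BPF_ADD: Test: 0x12 + 0xab = 0xbd",
- *		.u.insns_int = {
- *			BPF_LD_IMM64(R5, 0xab | (0xbaadf00dULL << 32)),
- *			BPF_ST_MEM(BPF_W, R10, -40, 0x12),
- *			BPF_ATOMIC_OP(BPF_W, BPF_ADD, R10, R5, -40),
- *			BPF_LDX_MEM(BPF_W, R0, R10, -40),
- *			BPF_ALU64_REG(BPF_MOV, R1, R0),
- *			BPF_ALU64_IMM(BPF_RSH, R1, 32),
- *			BPF_ALU64_REG(BPF_OR, R0, R1),
- *			BPF_EXIT_INSN(),
- *		},
- *		INTERNAL,
- *		{ },
- *		{ { 0, 0xbd } },
- *		.stack_depth = 40,
- *	}
- */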
- /* BPF_ATOMIC | BPF_W: BPF_ADD */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
- /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- /* BPF_ATOMIC | BPF_DW: BPF_ADD */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
- /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
- /* BPF_ATOMIC | BPF_W: BPF_AND */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
- /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- /* BPF_ATOMIC | BPF_DW: BPF_AND */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
- /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
- /* BPF_ATOMIC | BPF_W: BPF_OR */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
- /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- /* BPF_ATOMIC | BPF_DW: BPF_OR */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
- /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
- /* BPF_ATOMIC | BPF_W: BPF_XOR */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- /* BPF_ATOMIC | BPF_DW: BPF_XOR */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
- /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
- /* BPF_ATOMIC | BPF_W: BPF_XCHG */
- BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
- BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
- #undef BPF_ATOMIC_POISON
- #undef BPF_ATOMIC_OP_TEST1
- #undef BPF_ATOMIC_OP_TEST2
- #undef BPF_ATOMIC_OP_TEST3
- #undef BPF_ATOMIC_OP_TEST4
- /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
- {
- "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x01234567 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
- BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x01234567 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
- BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_LDX_MEM(BPF_W, R0, R10, -40),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x01234567 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
- BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
- BPF_ALU32_REG(BPF_MOV, R0, R3),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x89abcdef } },
- .stack_depth = 40,
- },
- /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
- {
- "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
- BPF_JMP_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_STX_MEM(BPF_DW, R10, R0, -40),
- BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
- BPF_LDX_MEM(BPF_DW, R0, R10, -40),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_ALU64_IMM(BPF_ADD, R0, 1),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
- BPF_JMP_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_ALU64_IMM(BPF_ADD, R0, 1),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
- BPF_LDX_MEM(BPF_DW, R0, R10, -40),
- BPF_JMP_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 40,
- },
- {
- "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_STX_MEM(BPF_DW, R10, R1, -40),
- BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
- BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_REG(BPF_SUB, R0, R2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 40,
- },
- /* BPF_JMP32 | BPF_JEQ | BPF_K */
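- /*
- * Most JMP32 tests below follow the same pattern: the first branch
- * must not be taken and the second must be; a branch taken or missed
- * in error lands on the MOV that clears R0, so mistakes show up as a
- * zero result.
- */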
- {
- "JMP32_JEQ_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
- BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JEQ_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
- BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
- BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 12345678 } }
- },
- {
- "JMP32_JEQ_K: negative immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
- BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- /* BPF_JMP32 | BPF_JEQ | BPF_X */
- {
- "JMP32_JEQ_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1234),
- BPF_ALU32_IMM(BPF_MOV, R1, 4321),
- BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 1234),
- BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1234 } }
- },
- /* BPF_JMP32 | BPF_JNE | BPF_K */
- {
- "JMP32_JNE_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
- BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JNE_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
- BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
- BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 12345678 } }
- },
- {
- "JMP32_JNE_K: negative immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
- BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- /* BPF_JMP32 | BPF_JNE | BPF_X */
- {
- "JMP32_JNE_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1234),
- BPF_ALU32_IMM(BPF_MOV, R1, 1234),
- BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 4321),
- BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1234 } }
- },
- /* BPF_JMP32 | BPF_JSET | BPF_K */
- {
- "JMP32_JSET_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
- BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } }
- },
- {
- "JMP32_JSET_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
- BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
- BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0x40000000 } }
- },
- {
- "JMP32_JSET_K: negative immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- /* BPF_JMP32 | BPF_JSET | BPF_X */
- {
- "JMP32_JSET_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 8),
- BPF_ALU32_IMM(BPF_MOV, R1, 7),
- BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
- BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 8 } }
- },
- /* BPF_JMP32 | BPF_JGT | BPF_K */
- {
- "JMP32_JGT_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
- BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JGT_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
- BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JGT | BPF_X */
- {
- "JMP32_JGT_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
- BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JGE | BPF_K */
- {
- "JMP32_JGE_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
- BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JGE_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
- BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JGE | BPF_X */
- {
- "JMP32_JGE_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
- BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JLT | BPF_K */
- {
- "JMP32_JLT_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
- BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JLT_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
- BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JLT | BPF_X */
- {
- "JMP32_JLT_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
- BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
- BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JLE | BPF_K */
- {
- "JMP32_JLE_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 123),
- BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
- BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } }
- },
- {
- "JMP32_JLE_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
- BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JLE | BPF_X */
- {
- "JMP32_JLE_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
- BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
- BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xfffffffe } }
- },
- /* BPF_JMP32 | BPF_JSGT | BPF_K */
- {
- "JMP32_JSGT_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
- BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "JMP32_JSGT_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
- BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSGT | BPF_X */
- {
- "JMP32_JSGT_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
- BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
- BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSGE | BPF_K */
- {
- "JMP32_JSGE_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
- BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "JMP32_JSGE_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
- BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSGE | BPF_X */
- {
- "JMP32_JSGE_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
- BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
- BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSLT | BPF_K */
- {
- "JMP32_JSLT_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
- BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "JMP32_JSLT_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
- BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSLT | BPF_X */
- {
- "JMP32_JSLT_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
- BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
- BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSLE | BPF_K */
- {
- "JMP32_JSLE_K: Small immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -123),
- BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
- BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -123 } }
- },
- {
- "JMP32_JSLE_K: Large immediate",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
- BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP32 | BPF_JSLE | BPF_X */
- {
- "JMP32_JSLE_X",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
- BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
- BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
- BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, -12345678 } }
- },
- /* BPF_JMP | BPF_EXIT */
- {
- "JMP_EXIT",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
- },
- INTERNAL,
- { },
- { { 0, 0x4711 } },
- },
- /* BPF_JMP | BPF_JA */
- {
- "JMP_JA: Unconditional jump: if (true) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JA, 0, 0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSLT | BPF_K */
- {
- "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
- BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSGT | BPF_K */
- {
- "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSLE | BPF_K */
- {
- "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
- BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLE_K: Signed jump: value walk 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
- BPF_ALU64_IMM(BPF_SUB, R1, 1),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
- BPF_ALU64_IMM(BPF_SUB, R1, 1),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
- BPF_ALU64_IMM(BPF_SUB, R1, 1),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
- BPF_EXIT_INSN(), /* bad exit */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLE_K: Signed jump: value walk 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
- BPF_ALU64_IMM(BPF_SUB, R1, 2),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
- BPF_ALU64_IMM(BPF_SUB, R1, 2),
- BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
- BPF_EXIT_INSN(), /* bad exit */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSGE | BPF_K */
- {
- "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
- BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGE_K: Signed jump: value walk 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -3),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
- BPF_ALU64_IMM(BPF_ADD, R1, 1),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
- BPF_ALU64_IMM(BPF_ADD, R1, 1),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
- BPF_ALU64_IMM(BPF_ADD, R1, 1),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
- BPF_EXIT_INSN(), /* bad exit */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGE_K: Signed jump: value walk 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -3),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
- BPF_ALU64_IMM(BPF_ADD, R1, 2),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
- BPF_ALU64_IMM(BPF_ADD, R1, 2),
- BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
- BPF_EXIT_INSN(), /* bad exit */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JGT | BPF_K */
- {
- "JMP_JGT_K: if (3 > 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JLT | BPF_K */
- {
- "JMP_JLT_K: if (2 < 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 2),
- BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 1),
- BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JGE | BPF_K */
- {
- "JMP_JGE_K: if (3 >= 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JLE | BPF_K */
- {
- "JMP_JLE_K: if (2 <= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 2),
- BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
- {
- "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
- .u.insns_int = {
- BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
- BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
- BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGE_K: if (3 >= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
- {
- "JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
- .u.insns_int = {
- BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
- BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
- BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
- BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JLE_K: if (3 <= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JNE | BPF_K */
- {
- "JMP_JNE_K: if (3 != 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JEQ | BPF_K */
- {
- "JMP_JEQ_K: if (3 == 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSET | BPF_K */
- {
- "JMP_JSET_K: if (0x3 & 0x2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSGT | BPF_X */
- {
- "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -2),
- BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -1),
- BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSLT | BPF_X */
- {
- "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -2),
- BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -1),
- BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSGE | BPF_X */
- {
- "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -2),
- BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -1),
- BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSLE | BPF_X */
- {
- "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -2),
- BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, -1),
- BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JGT | BPF_X */
- {
- "JMP_JGT_X: if (3 > 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JGT, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, 1),
- BPF_JMP_REG(BPF_JGT, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JLT | BPF_X */
- {
- "JMP_JLT_X: if (2 < 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JLT, R2, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, -1),
- BPF_LD_IMM64(R2, 1),
- BPF_JMP_REG(BPF_JLT, R2, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JGE | BPF_X */
- {
- "JMP_JGE_X: if (3 >= 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JGE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGE_X: if (3 >= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 3),
- BPF_JMP_REG(BPF_JGE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JLE | BPF_X */
- {
- "JMP_JLE_X: if (2 <= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JLE, R2, R1, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JLE_X: if (3 <= 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 3),
- BPF_JMP_REG(BPF_JLE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- /* Mainly testing JIT + imm64 here. */
- "JMP_JGE_X: ldimm64 test 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JGE, R1, R2, 2),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xeeeeeeeeU } },
- },
- {
- "JMP_JGE_X: ldimm64 test 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JGE, R1, R2, 0),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffffU } },
- },
- {
- "JMP_JGE_X: ldimm64 test 3",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JGE, R1, R2, 4),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JLE_X: ldimm64 test 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JLE, R2, R1, 2),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xeeeeeeeeU } },
- },
- {
- "JMP_JLE_X: ldimm64 test 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JLE, R2, R1, 0),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0xffffffffU } },
- },
- {
- "JMP_JLE_X: ldimm64 test 3",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JLE, R2, R1, 4),
- BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
- BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JNE | BPF_X */
- {
- "JMP_JNE_X: if (3 != 2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JNE, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JEQ | BPF_X */
- {
- "JMP_JEQ_X: if (3 == 3) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 3),
- BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- /* BPF_JMP | BPF_JSET | BPF_X */
- {
- "JMP_JSET_X: if (0x3 & 0x2) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 2),
- BPF_JMP_REG(BPF_JSET, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R0, 0),
- BPF_LD_IMM64(R1, 3),
- BPF_LD_IMM64(R2, 0xffffffff),
- BPF_JMP_REG(BPF_JSET, R1, R2, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JA: Jump, gap, jump, ...",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xababcbac } },
- .fill_helper = bpf_fill_ja,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Maximum possible literals",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xffffffff } },
- .fill_helper = bpf_fill_maxinsns1,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Single literal",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xfefefefe } },
- .fill_helper = bpf_fill_maxinsns2,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Run/add until end",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0x947bf368 } },
- .fill_helper = bpf_fill_maxinsns3,
- },
- {
- "BPF_MAXINSNS: Too many instructions",
- { },
- CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
- { },
- { },
- .fill_helper = bpf_fill_maxinsns4,
- .expected_errcode = -EINVAL,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Very long jump",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xabababab } },
- .fill_helper = bpf_fill_maxinsns5,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Ctx heavy transformations",
- { },
- CLASSIC,
- { },
- {
- { 1, SKB_VLAN_PRESENT },
- { 10, SKB_VLAN_PRESENT }
- },
- .fill_helper = bpf_fill_maxinsns6,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Call heavy transformations",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 1, 0 }, { 10, 0 } },
- .fill_helper = bpf_fill_maxinsns7,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Jump heavy test",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xffffffff } },
- .fill_helper = bpf_fill_maxinsns8,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Very long jump backwards",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0xcbababab } },
- .fill_helper = bpf_fill_maxinsns9,
- },
- { /* Mainly checking JIT here. */
- "BPF_MAXINSNS: Edge hopping nuthouse",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0xabababac } },
- .fill_helper = bpf_fill_maxinsns10,
- },
- {
- "BPF_MAXINSNS: Jump, gap, jump, ...",
- { },
- CLASSIC | FLAG_NO_DATA,
- { },
- { { 0, 0xababcbac } },
- .fill_helper = bpf_fill_maxinsns11,
- },
- {
- "BPF_MAXINSNS: jump over MSH",
- { },
- CLASSIC | FLAG_EXPECTED_FAIL,
- { 0xfa, 0xfb, 0xfc, 0xfd, },
- { { 4, 0xabababab } },
- .fill_helper = bpf_fill_maxinsns12,
- .expected_errcode = -EINVAL,
- },
- {
- "BPF_MAXINSNS: exec all MSH",
- { },
- CLASSIC,
- { 0xfa, 0xfb, 0xfc, 0xfd, },
- { { 4, 0xababab83 } },
- .fill_helper = bpf_fill_maxinsns13,
- },
- {
- "BPF_MAXINSNS: ld_abs+get_processor_id",
- { },
- CLASSIC,
- { },
- { { 1, 0xbee } },
- .fill_helper = bpf_fill_ld_abs_get_processor_id,
- },
- /*
- * LD_IND / LD_ABS on fragmented SKBs
- */
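- /*
- * For these tests the runner attaches .frag_data to the test skb as a
- * paged (non-linear) fragment, so loads at offsets beyond the linear
- * head take the non-linear access path of LD_ABS / LD_IND. The
- * "mixed head/frag" variants read across the head/fragment boundary.
- */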
- {
- "LD_IND byte frag",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x42} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_IND halfword frag",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x4344} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_IND word frag",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x21071983} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_IND halfword mixed head/frag",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { [0x3e] = 0x25, [0x3f] = 0x05, },
- { {0x40, 0x0519} },
- .frag_data = { 0x19, 0x82 },
- },
- {
- "LD_IND word mixed head/frag",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { [0x3e] = 0x25, [0x3f] = 0x05, },
- { {0x40, 0x25051982} },
- .frag_data = { 0x19, 0x82 },
- },
- {
- "LD_ABS byte frag",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x42} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_ABS halfword frag",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x4344} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_ABS word frag",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { },
- { {0x40, 0x21071983} },
- .frag_data = {
- 0x42, 0x00, 0x00, 0x00,
- 0x43, 0x44, 0x00, 0x00,
- 0x21, 0x07, 0x19, 0x83,
- },
- },
- {
- "LD_ABS halfword mixed head/frag",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { [0x3e] = 0x25, [0x3f] = 0x05, },
- { {0x40, 0x0519} },
- .frag_data = { 0x19, 0x82 },
- },
- {
- "LD_ABS word mixed head/frag",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_SKB_FRAG,
- { [0x3e] = 0x25, [0x3f] = 0x05, },
- { {0x40, 0x25051982} },
- .frag_data = { 0x19, 0x82 },
- },
- /*
- * LD_IND / LD_ABS on non fragmented SKBs
- */
- {
- /*
- * this tests that the JIT/interpreter correctly resets X
- * before using it in an LD_IND instruction.
- */
- "LD_IND byte default X",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x1] = 0x42 },
- { {0x40, 0x42 } },
- },
- {
- "LD_IND byte positive offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x82 } },
- },
- {
- "LD_IND byte negative offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x05 } },
- },
- {
- "LD_IND byte positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xff } },
- },
- {
- "LD_IND byte positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_IND byte negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 } },
- },
- {
- "LD_IND byte negative offset, multiple calls",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
- BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x82 }, },
- },
- {
- "LD_IND halfword positive offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- },
- { {0x40, 0xdd88 } },
- },
- {
- "LD_IND halfword negative offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- },
- { {0x40, 0xbb66 } },
- },
- {
- "LD_IND halfword unaligned",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- },
- { {0x40, 0x66cc } },
- },
- {
- "LD_IND halfword positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xffff } },
- },
- {
- "LD_IND halfword positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_IND halfword negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 } },
- },
- {
- "LD_IND word positive offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xee99ffaa } },
- },
- {
- "LD_IND word negative offset",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xaa55bb66 } },
- },
- {
- "LD_IND word unaligned (addr & 3 == 2)",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xbb66cc77 } },
- },
- {
- "LD_IND word unaligned (addr & 3 == 1)",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0x55bb66cc } },
- },
- {
- "LD_IND word unaligned (addr & 3 == 3)",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0x66cc77dd } },
- },
- {
- "LD_IND word positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xffffffff } },
- },
- {
- "LD_IND word positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_IND word negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
- BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 } },
- },
- {
- "LD_ABS byte",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xcc } },
- },
- {
- "LD_ABS byte positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xff } },
- },
- {
- "LD_ABS byte positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_ABS byte negative offset, out of bounds load",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_EXPECTED_FAIL,
- .expected_errcode = -EINVAL,
- },
- {
- "LD_ABS byte negative offset, in bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x82 }, },
- },
- {
- "LD_ABS byte negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_ABS byte negative offset, multiple calls",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
- BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x82 }, },
- },
- {
- "LD_ABS halfword",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xdd88 } },
- },
- {
- "LD_ABS halfword unaligned",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0x99ff } },
- },
- {
- "LD_ABS halfword positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xffff } },
- },
- {
- "LD_ABS halfword positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_ABS halfword negative offset, out of bounds load",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_EXPECTED_FAIL,
- .expected_errcode = -EINVAL,
- },
- {
- "LD_ABS halfword negative offset, in bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x1982 }, },
- },
- {
- "LD_ABS halfword negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_ABS word",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xaa55bb66 } },
- },
- {
- "LD_ABS word unaligned (addr & 3 == 2)",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0xdd88ee99 } },
- },
- {
- "LD_ABS word unaligned (addr & 3 == 1)",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0x77dd88ee } },
- },
- {
- "LD_ABS word unaligned (addr & 3 == 3)",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- {
- [0x1c] = 0xaa, [0x1d] = 0x55,
- [0x1e] = 0xbb, [0x1f] = 0x66,
- [0x20] = 0xcc, [0x21] = 0x77,
- [0x22] = 0xdd, [0x23] = 0x88,
- [0x24] = 0xee, [0x25] = 0x99,
- [0x26] = 0xff, [0x27] = 0xaa,
- },
- { {0x40, 0x88ee99ff } },
- },
- {
- "LD_ABS word positive offset, all ff",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
- { {0x40, 0xffffffff } },
- },
- {
- "LD_ABS word positive offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LD_ABS word negative offset, out of bounds load",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_EXPECTED_FAIL,
- .expected_errcode = -EINVAL,
- },
- {
- "LD_ABS word negative offset, in bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x25051982 }, },
- },
- {
- "LD_ABS word negative offset, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x3f, 0 }, },
- },
- {
- "LDX_MSH standalone, preserved A",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0xffeebbaa }, },
- },
- {
- "LDX_MSH standalone, preserved A 2",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x175e9d63 }, },
- },
- {
- "LDX_MSH standalone, test result 1",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x14 }, },
- },
- {
- "LDX_MSH standalone, test result 2",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x24 }, },
- },
- {
- "LDX_MSH standalone, negative offset",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0 }, },
- },
- {
- "LDX_MSH standalone, negative offset 2",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0x24 }, },
- },
- {
- "LDX_MSH standalone, out of bounds",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
- BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
- BPF_STMT(BPF_MISC | BPF_TXA, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC,
- { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
- { {0x40, 0 }, },
- },
- /*
- * verify that the interpreter or JIT correctly sets A and X
- * to 0.
- */
- {
- "ADD default X",
- .u.insns = {
- /*
- * A = 0x42
- * A = A + X
- * ret A
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x42),
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x42 } },
- },
- {
- "ADD default A",
- .u.insns = {
- /*
- * A = A + 0x42
- * ret A
- */
- BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x42 } },
- },
- {
- "SUB default X",
- .u.insns = {
- /*
- * A = 0x66
- * A = A - X
- * ret A
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x66),
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x66 } },
- },
- {
- "SUB default A",
- .u.insns = {
- /*
- * A = A - -0x66
- * ret A
- */
- BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x66 } },
- },
- {
- "MUL default X",
- .u.insns = {
- /*
- * A = 0x42
- * A = A * X
- * ret A
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x42),
- BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "MUL default A",
- .u.insns = {
- /*
- * A = A * 0x66
- * ret A
- */
- BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "DIV default X",
- .u.insns = {
- /*
- * A = 0x42
- * A = A / X ; this halts the filter execution if X is 0
- * ret 0x42
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x42),
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_K, 0x42),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "DIV default A",
- .u.insns = {
- /*
- * A = A / 1
- * ret A
- */
- BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "MOD default X",
- .u.insns = {
- /*
- * A = 0x42
- * A = A mod X ; this halts the filter execution if X is 0
- * ret 0x42
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x42),
- BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
- BPF_STMT(BPF_RET | BPF_K, 0x42),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "MOD default A",
- .u.insns = {
- /*
- * A = A mod 1
- * ret A
- */
- BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
- BPF_STMT(BPF_RET | BPF_A, 0x0),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x0 } },
- },
- {
- "JMP EQ default A",
- .u.insns = {
- /*
- * cmp A, 0x0, 0, 1
- * ret 0x42
- * ret 0x66
- */
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 0x42),
- BPF_STMT(BPF_RET | BPF_K, 0x66),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x42 } },
- },
- {
- "JMP EQ default X",
- .u.insns = {
- /*
- * A = 0x0
- * cmp A, X, 0, 1
- * ret 0x42
- * ret 0x66
- */
- BPF_STMT(BPF_LD | BPF_IMM, 0x0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
- BPF_STMT(BPF_RET | BPF_K, 0x42),
- BPF_STMT(BPF_RET | BPF_K, 0x66),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { {0x1, 0x42 } },
- },
- /* Checking interpreter vs JIT wrt sign-extended imms. */
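- /*
- * A BPF_JMP | BPF_K comparison sign-extends the 32-bit immediate to
- * 64 bits before comparing it with the destination register, so a
- * register holding the zero-extended value 0xfefb0000 does not compare
- * equal to the immediate 0xfefb0000. A JIT that zero-extends the
- * immediate instead gets the results below wrong.
- */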
- {
- "JNE signed compare, test 1",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
- BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
- BPF_MOV64_REG(R2, R1),
- BPF_ALU64_REG(BPF_AND, R2, R3),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JNE signed compare, test 2",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
- BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
- BPF_MOV64_REG(R2, R1),
- BPF_ALU64_REG(BPF_AND, R2, R3),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JNE signed compare, test 3",
- .u.insns_int = {
- BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
- BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
- BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
- BPF_MOV64_REG(R2, R1),
- BPF_ALU64_REG(BPF_AND, R2, R3),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JNE, R2, R4, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "JNE signed compare, test 4",
- .u.insns_int = {
- BPF_LD_IMM64(R1, -17104896),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "JNE signed compare, test 5",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0xfefb0000),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 1 } },
- },
- {
- "JNE signed compare, test 6",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x7efb0000),
- BPF_ALU32_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
- BPF_ALU32_IMM(BPF_MOV, R0, 2),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 2 } },
- },
- {
- "JNE signed compare, test 7",
- .u.insns = {
- BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
- BPF_STMT(BPF_MISC | BPF_TAX, 0),
- BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
- BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
- BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
- BPF_STMT(BPF_RET | BPF_K, 1),
- BPF_STMT(BPF_RET | BPF_K, 2),
- },
- CLASSIC | FLAG_NO_DATA,
- {},
- { { 0, 2 } },
- },
- /* BPF_LDX_MEM with operand aliasing */
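- /*
- * In these tests the destination register of the load is also the
- * register holding the base address, so the JIT must consume the
- * address before it overwrites the register with the loaded value.
- */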
- {
- "LDX_MEM_B: operand register aliasing",
- .u.insns_int = {
- BPF_ST_MEM(BPF_B, R10, -8, 123),
- BPF_MOV64_REG(R0, R10),
- BPF_LDX_MEM(BPF_B, R0, R0, -8),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123 } },
- .stack_depth = 8,
- },
- {
- "LDX_MEM_H: operand register aliasing",
- .u.insns_int = {
- BPF_ST_MEM(BPF_H, R10, -8, 12345),
- BPF_MOV64_REG(R0, R10),
- BPF_LDX_MEM(BPF_H, R0, R0, -8),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 12345 } },
- .stack_depth = 8,
- },
- {
- "LDX_MEM_W: operand register aliasing",
- .u.insns_int = {
- BPF_ST_MEM(BPF_W, R10, -8, 123456789),
- BPF_MOV64_REG(R0, R10),
- BPF_LDX_MEM(BPF_W, R0, R0, -8),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 123456789 } },
- .stack_depth = 8,
- },
- {
- "LDX_MEM_DW: operand register aliasing",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x123456789abcdefULL),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- BPF_MOV64_REG(R0, R10),
- BPF_LDX_MEM(BPF_DW, R0, R0, -8),
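- /* Collapse the 64-bit comparison R0 == R1 into a 32-bit result: 0 iff equal */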
- BPF_ALU64_REG(BPF_SUB, R0, R1),
- BPF_MOV64_REG(R1, R0),
- BPF_ALU64_IMM(BPF_RSH, R1, 32),
- BPF_ALU64_REG(BPF_OR, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- /*
- * Register (non-)clobbering tests for the case where a JIT implements
- * complex ALU or ATOMIC operations via function calls. If so, the
- * function call must be transparent to the eBPF registers. The JIT
- * must therefore save and restore relevant registers across the call.
- * The following tests check that the eBPF registers retain their
- * values after such an operation. Mainly intended for complex ALU
- * and atomic operations, but we run it for all. You never know...
- *
- * Note that each operation should be tested twice with different
- * destinations, to check preservation for all registers.
- */
- #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src) \
- { \
- #alu "_" #op " to " #dst ": no clobbering", \
- .u.insns_int = { \
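- /* R0..R9 are register numbers 0..9, so MOV Rn, Rn sets each Rn to n */ \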
- BPF_ALU64_IMM(BPF_MOV, R0, R0), \
- BPF_ALU64_IMM(BPF_MOV, R1, R1), \
- BPF_ALU64_IMM(BPF_MOV, R2, R2), \
- BPF_ALU64_IMM(BPF_MOV, R3, R3), \
- BPF_ALU64_IMM(BPF_MOV, R4, R4), \
- BPF_ALU64_IMM(BPF_MOV, R5, R5), \
- BPF_ALU64_IMM(BPF_MOV, R6, R6), \
- BPF_ALU64_IMM(BPF_MOV, R7, R7), \
- BPF_ALU64_IMM(BPF_MOV, R8, R8), \
- BPF_ALU64_IMM(BPF_MOV, R9, R9), \
- BPF_##alu(BPF_ ##op, dst, src), \
- BPF_ALU32_IMM(BPF_MOV, dst, dst), \
- BPF_JMP_IMM(BPF_JNE, R0, R0, 10), \
- BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
- BPF_JMP_IMM(BPF_JNE, R2, R2, 8), \
- BPF_JMP_IMM(BPF_JNE, R3, R3, 7), \
- BPF_JMP_IMM(BPF_JNE, R4, R4, 6), \
- BPF_JMP_IMM(BPF_JNE, R5, R5, 5), \
- BPF_JMP_IMM(BPF_JNE, R6, R6, 4), \
- BPF_JMP_IMM(BPF_JNE, R7, R7, 3), \
- BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
- BPF_JMP_IMM(BPF_JNE, R9, R9, 1), \
- BPF_ALU64_IMM(BPF_MOV, R0, 1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 1 } } \
- }
- /* ALU64 immediate operations, register clobbering */
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
- /* ALU32 immediate operations, register clobbering */
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
- BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
- /* ALU64 register operations, register clobbering */
- BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
- /* ALU32 register operations, register clobbering */
- BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
- BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
- #undef BPF_TEST_CLOBBER_ALU
- #define BPF_TEST_CLOBBER_ATOMIC(width, op) \
- { \
- "Atomic_" #width " " #op ": no clobbering", \
- .u.insns_int = { \
- BPF_ALU64_IMM(BPF_MOV, R0, 0), \
- BPF_ALU64_IMM(BPF_MOV, R1, 1), \
- BPF_ALU64_IMM(BPF_MOV, R2, 2), \
- BPF_ALU64_IMM(BPF_MOV, R3, 3), \
- BPF_ALU64_IMM(BPF_MOV, R4, 4), \
- BPF_ALU64_IMM(BPF_MOV, R5, 5), \
- BPF_ALU64_IMM(BPF_MOV, R6, 6), \
- BPF_ALU64_IMM(BPF_MOV, R7, 7), \
- BPF_ALU64_IMM(BPF_MOV, R8, 8), \
- BPF_ALU64_IMM(BPF_MOV, R9, 9), \
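- /* Seed memory so FETCH/XCHG leave R1 == 1 and CMPXCHG leaves R0 == 0 */ \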
- BPF_ST_MEM(width, R10, -8, \
- (op) == BPF_CMPXCHG ? 0 : \
- (op) & BPF_FETCH ? 1 : 0), \
- BPF_ATOMIC_OP(width, op, R10, R1, -8), \
- BPF_JMP_IMM(BPF_JNE, R0, 0, 10), \
- BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
- BPF_JMP_IMM(BPF_JNE, R2, 2, 8), \
- BPF_JMP_IMM(BPF_JNE, R3, 3, 7), \
- BPF_JMP_IMM(BPF_JNE, R4, 4, 6), \
- BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
- BPF_JMP_IMM(BPF_JNE, R6, 6, 4), \
- BPF_JMP_IMM(BPF_JNE, R7, 7, 3), \
- BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
- BPF_JMP_IMM(BPF_JNE, R9, 9, 1), \
- BPF_ALU64_IMM(BPF_MOV, R0, 1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 1 } }, \
- .stack_depth = 8, \
- }
- /* 64-bit atomic operations, register clobbering */
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
- BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
- /* 32-bit atomic operations, register clobbering */
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
- BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
- #undef BPF_TEST_CLOBBER_ATOMIC
- /* Checking that ALU32 src is not zero extended in place */
- #define BPF_ALU32_SRC_ZEXT(op) \
- { \
- "ALU32_" #op "_X: src preserved in zext", \
- .u.insns_int = { \
- BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
- BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
- BPF_ALU64_REG(BPF_MOV, R0, R1), \
- BPF_ALU32_REG(BPF_##op, R2, R1), \
- BPF_ALU64_REG(BPF_SUB, R0, R1), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- }
- BPF_ALU32_SRC_ZEXT(MOV),
- BPF_ALU32_SRC_ZEXT(AND),
- BPF_ALU32_SRC_ZEXT(OR),
- BPF_ALU32_SRC_ZEXT(XOR),
- BPF_ALU32_SRC_ZEXT(ADD),
- BPF_ALU32_SRC_ZEXT(SUB),
- BPF_ALU32_SRC_ZEXT(MUL),
- BPF_ALU32_SRC_ZEXT(DIV),
- BPF_ALU32_SRC_ZEXT(MOD),
- #undef BPF_ALU32_SRC_ZEXT
- /* Checking that ATOMIC32 src is not zero extended in place */
- #define BPF_ATOMIC32_SRC_ZEXT(op) \
- { \
- "ATOMIC_W_" #op ": src preserved in zext", \
- .u.insns_int = { \
- BPF_LD_IMM64(R0, 0x0123456789acbdefULL), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ST_MEM(BPF_W, R10, -4, 0), \
- BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4), \
- BPF_ALU64_REG(BPF_SUB, R0, R1), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- .stack_depth = 8, \
- }
- BPF_ATOMIC32_SRC_ZEXT(ADD),
- BPF_ATOMIC32_SRC_ZEXT(AND),
- BPF_ATOMIC32_SRC_ZEXT(OR),
- BPF_ATOMIC32_SRC_ZEXT(XOR),
- #undef BPF_ATOMIC32_SRC_ZEXT
- /* Checking that CMPXCHG32 src is not zero extended in place */
- {
- "ATOMIC_W_CMPXCHG: src preserved in zext",
- .u.insns_int = {
- BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
- BPF_ALU64_REG(BPF_MOV, R2, R1),
- BPF_ALU64_REG(BPF_MOV, R0, 0),
- BPF_ST_MEM(BPF_W, R10, -4, 0),
- BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
- BPF_ALU64_REG(BPF_SUB, R1, R2),
- BPF_ALU64_REG(BPF_MOV, R2, R1),
- BPF_ALU64_IMM(BPF_RSH, R2, 32),
- BPF_ALU64_REG(BPF_OR, R1, R2),
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_EXIT_INSN(),
- },
- INTERNAL,
- { },
- { { 0, 0 } },
- .stack_depth = 8,
- },
- /* Checking that JMP32 immediate src is not zero extended in place */
- #define BPF_JMP32_IMM_ZEXT(op) \
- { \
- "JMP32_" #op "_K: operand preserved in zext", \
- .u.insns_int = { \
- BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
- BPF_JMP_A(0), /* Nop */ \
- BPF_ALU64_REG(BPF_SUB, R0, R1), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- }
- BPF_JMP32_IMM_ZEXT(JEQ),
- BPF_JMP32_IMM_ZEXT(JNE),
- BPF_JMP32_IMM_ZEXT(JSET),
- BPF_JMP32_IMM_ZEXT(JGT),
- BPF_JMP32_IMM_ZEXT(JGE),
- BPF_JMP32_IMM_ZEXT(JLT),
- BPF_JMP32_IMM_ZEXT(JLE),
- BPF_JMP32_IMM_ZEXT(JSGT),
- BPF_JMP32_IMM_ZEXT(JSGE),
- BPF_JMP32_IMM_ZEXT(JSLT),
- BPF_JMP32_IMM_ZEXT(JSLE),
- #undef BPF_JMP32_IMM_ZEXT
- /* Checking that JMP32 dst & src are not zero extended in place */
- #define BPF_JMP32_REG_ZEXT(op) \
- { \
- "JMP32_" #op "_X: operands preserved in zext", \
- .u.insns_int = { \
- BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
- BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
- BPF_ALU64_REG(BPF_MOV, R2, R0), \
- BPF_ALU64_REG(BPF_MOV, R3, R1), \
- BPF_JMP32_REG(BPF_##op, R0, R1, 1), \
- BPF_JMP_A(0), /* Nop */ \
- BPF_ALU64_REG(BPF_SUB, R0, R2), \
- BPF_ALU64_REG(BPF_SUB, R1, R3), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_ALU64_REG(BPF_MOV, R1, R0), \
- BPF_ALU64_IMM(BPF_RSH, R1, 32), \
- BPF_ALU64_REG(BPF_OR, R0, R1), \
- BPF_EXIT_INSN(), \
- }, \
- INTERNAL, \
- { }, \
- { { 0, 0 } }, \
- }
- BPF_JMP32_REG_ZEXT(JEQ),
- BPF_JMP32_REG_ZEXT(JNE),
- BPF_JMP32_REG_ZEXT(JSET),
- BPF_JMP32_REG_ZEXT(JGT),
- BPF_JMP32_REG_ZEXT(JGE),
- BPF_JMP32_REG_ZEXT(JLT),
- BPF_JMP32_REG_ZEXT(JLE),
- BPF_JMP32_REG_ZEXT(JSGT),
- BPF_JMP32_REG_ZEXT(JSGE),
- BPF_JMP32_REG_ZEXT(JSLT),
- BPF_JMP32_REG_ZEXT(JSLE),
- #undef BPF_JMP32_REG_ZEXT
- /* ALU64 K register combinations */
- {
- "ALU64_MOV_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mov_imm_regs,
- },
- {
- "ALU64_AND_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_and_imm_regs,
- },
- {
- "ALU64_OR_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_or_imm_regs,
- },
- {
- "ALU64_XOR_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_xor_imm_regs,
- },
- {
- "ALU64_LSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_lsh_imm_regs,
- },
- {
- "ALU64_RSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_rsh_imm_regs,
- },
- {
- "ALU64_ARSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_arsh_imm_regs,
- },
- {
- "ALU64_ADD_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_add_imm_regs,
- },
- {
- "ALU64_SUB_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_sub_imm_regs,
- },
- {
- "ALU64_MUL_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mul_imm_regs,
- },
- {
- "ALU64_DIV_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_div_imm_regs,
- },
- {
- "ALU64_MOD_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mod_imm_regs,
- },
- /* ALU32 K registers */
- {
- "ALU32_MOV_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mov_imm_regs,
- },
- {
- "ALU32_AND_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_and_imm_regs,
- },
- {
- "ALU32_OR_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_or_imm_regs,
- },
- {
- "ALU32_XOR_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_xor_imm_regs,
- },
- {
- "ALU32_LSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_lsh_imm_regs,
- },
- {
- "ALU32_RSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_rsh_imm_regs,
- },
- {
- "ALU32_ARSH_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_arsh_imm_regs,
- },
- {
- "ALU32_ADD_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_add_imm_regs,
- },
- {
- "ALU32_SUB_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_sub_imm_regs,
- },
- {
- "ALU32_MUL_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mul_imm_regs,
- },
- {
- "ALU32_DIV_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_div_imm_regs,
- },
- {
- "ALU32_MOD_K: registers",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mod_imm_regs,
- },
- /* ALU64 X register combinations */
- {
- "ALU64_MOV_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mov_reg_pairs,
- },
- {
- "ALU64_AND_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_and_reg_pairs,
- },
- {
- "ALU64_OR_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_or_reg_pairs,
- },
- {
- "ALU64_XOR_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_xor_reg_pairs,
- },
- {
- "ALU64_LSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_lsh_reg_pairs,
- },
- {
- "ALU64_RSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_rsh_reg_pairs,
- },
- {
- "ALU64_ARSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_arsh_reg_pairs,
- },
- {
- "ALU64_ADD_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_add_reg_pairs,
- },
- {
- "ALU64_SUB_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_sub_reg_pairs,
- },
- {
- "ALU64_MUL_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mul_reg_pairs,
- },
- {
- "ALU64_DIV_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_div_reg_pairs,
- },
- {
- "ALU64_MOD_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mod_reg_pairs,
- },
- /* ALU32 X register combinations */
- {
- "ALU32_MOV_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mov_reg_pairs,
- },
- {
- "ALU32_AND_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_and_reg_pairs,
- },
- {
- "ALU32_OR_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_or_reg_pairs,
- },
- {
- "ALU32_XOR_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_xor_reg_pairs,
- },
- {
- "ALU32_LSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_lsh_reg_pairs,
- },
- {
- "ALU32_RSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_rsh_reg_pairs,
- },
- {
- "ALU32_ARSH_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_arsh_reg_pairs,
- },
- {
- "ALU32_ADD_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_add_reg_pairs,
- },
- {
- "ALU32_SUB_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_sub_reg_pairs,
- },
- {
- "ALU32_MUL_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mul_reg_pairs,
- },
- {
- "ALU32_DIV_X: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_div_reg_pairs,
- },
- {
- "ALU32_MOD_X register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mod_reg_pairs,
- },
- /* Exhaustive test of ALU64 shift operations */
- {
- "ALU64_LSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_lsh_imm,
- },
- {
- "ALU64_RSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_rsh_imm,
- },
- {
- "ALU64_ARSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_arsh_imm,
- },
- {
- "ALU64_LSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_lsh_reg,
- },
- {
- "ALU64_RSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_rsh_reg,
- },
- {
- "ALU64_ARSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_arsh_reg,
- },
- /* Exhaustive test of ALU32 shift operations */
- {
- "ALU32_LSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_lsh_imm,
- },
- {
- "ALU32_RSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_rsh_imm,
- },
- {
- "ALU32_ARSH_K: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_arsh_imm,
- },
- {
- "ALU32_LSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_lsh_reg,
- },
- {
- "ALU32_RSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_rsh_reg,
- },
- {
- "ALU32_ARSH_X: all shift values",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_arsh_reg,
- },
- /*
- * Exhaustive test of ALU64 shift operations when
- * source and destination register are the same.
- */
- {
- "ALU64_LSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_lsh_same_reg,
- },
- {
- "ALU64_RSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_rsh_same_reg,
- },
- {
- "ALU64_ARSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_arsh_same_reg,
- },
- /*
- * Exhaustive test of ALU32 shift operations when
- * source and destination register are the same.
- */
- {
- "ALU32_LSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_lsh_same_reg,
- },
- {
- "ALU32_RSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_rsh_same_reg,
- },
- {
- "ALU32_ARSH_X: all shift values with the same register",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_arsh_same_reg,
- },
- /* ALU64 immediate magnitudes */
- {
- "ALU64_MOV_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mov_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_AND_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_and_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_OR_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_or_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_XOR_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_xor_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_ADD_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_add_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_SUB_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_sub_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_MUL_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mul_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_DIV_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_div_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_MOD_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mod_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* ALU32 immediate magnitudes */
- {
- "ALU32_MOV_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mov_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_AND_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_and_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_OR_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_or_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_XOR_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_xor_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_ADD_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_add_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_SUB_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_sub_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_MUL_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mul_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_DIV_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_div_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_MOD_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mod_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* ALU64 register magnitudes */
- {
- "ALU64_MOV_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mov_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_AND_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_and_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_OR_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_or_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_XOR_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_xor_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_ADD_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_add_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_SUB_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_sub_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_MUL_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mul_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_DIV_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_div_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU64_MOD_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu64_mod_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* ALU32 register magnitudes */
- {
- "ALU32_MOV_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mov_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_AND_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_and_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_OR_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_or_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_XOR_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_xor_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_ADD_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_add_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_SUB_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_sub_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_MUL_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mul_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_DIV_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_div_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ALU32_MOD_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_alu32_mod_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* LD_IMM64 immediate magnitudes and byte patterns */
- {
- "LD_IMM64: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_ld_imm64_magn,
- },
- {
- "LD_IMM64: checker byte patterns",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_ld_imm64_checker,
- },
- {
- "LD_IMM64: random positive and zero byte patterns",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_ld_imm64_pos_zero,
- },
- {
- "LD_IMM64: random negative and zero byte patterns",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_ld_imm64_neg_zero,
- },
- {
- "LD_IMM64: random positive and negative byte patterns",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_ld_imm64_pos_neg,
- },
- /* 64-bit ATOMIC register combinations */
- {
- "ATOMIC_DW_ADD: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_add_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_AND: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_and_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_OR: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_or_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_XOR: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xor_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_ADD_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_AND_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_OR_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_XOR_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_XCHG: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_DW_CMPXCHG: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
- .stack_depth = 8,
- },
- /* 32-bit ATOMIC register combinations */
- {
- "ATOMIC_W_ADD: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_add_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_AND: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_and_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_OR: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_or_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_XOR: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xor_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_ADD_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_AND_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_OR_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_XOR_FETCH: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_XCHG: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
- .stack_depth = 8,
- },
- {
- "ATOMIC_W_CMPXCHG: register combinations",
- { },
- INTERNAL,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
- .stack_depth = 8,
- },
- /* 64-bit ATOMIC magnitudes */
- {
- "ATOMIC_DW_ADD: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_add,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_AND: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_and,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_OR: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_or,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_XOR: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xor,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_ADD_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_add_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_AND_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_and_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_OR_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_or_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_XOR_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xor_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_XCHG: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic64_xchg,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_DW_CMPXCHG: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_cmpxchg64,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* 32-bit ATOMIC magnitudes */
- {
- "ATOMIC_W_ADD: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_add,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_AND: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_and,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_OR: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_or,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_XOR: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xor,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_ADD_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_add_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_AND_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_and_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_OR_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_or_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_XOR_FETCH: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xor_fetch,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_XCHG: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_atomic32_xchg,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "ATOMIC_W_CMPXCHG: all operand magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_cmpxchg32,
- .stack_depth = 8,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* JMP immediate magnitudes */
- {
- "JMP_JSET_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jset_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JEQ_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jeq_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JNE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jne_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JGT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jgt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JGE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jge_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JLT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jlt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JLE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jle_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSGT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsgt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSGE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsge_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSLT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jslt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSLE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsle_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* JMP register magnitudes */
- {
- "JMP_JSET_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jset_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JEQ_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jeq_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JNE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jne_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JGT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jgt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JGE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jge_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JLT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jlt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JLE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jle_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSGT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsgt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSGE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsge_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSLT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jslt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP_JSLE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp_jsle_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* JMP32 immediate magnitudes */
- {
- "JMP32_JSET_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jset_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JEQ_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jeq_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JNE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jne_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JGT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jgt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JGE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jge_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JLT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jlt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JLE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jle_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSGT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsgt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSGE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsge_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSLT_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jslt_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSLE_K: all immediate value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsle_imm,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* JMP32 register magnitudes */
- {
- "JMP32_JSET_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jset_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JEQ_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jeq_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JNE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jne_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JGT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jgt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JGE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jge_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JLT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jlt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JLE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jle_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSGT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsgt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSGE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsge_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSLT_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jslt_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- {
- "JMP32_JSLE_X: all register value magnitudes",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_jmp32_jsle_reg,
- .nr_testruns = NR_PATTERN_RUNS,
- },
- /* Conditional jumps with constant decision */
- {
- "JMP_JSET_K: imm = 0 -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JLT_K: imm = 0 -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JGE_K: imm = 0 -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGT_K: imm = 0xffffffff -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JLE_K: imm = 0xffffffff -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP32_JSGE_K: imm = -0x80000000 -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP32_JSLT_K: imm = -0x80000000 -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JEQ_X: dst = src -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JGE_X: dst = src -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JGE, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JLE_X: dst = src -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JLE, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSGE_X: dst = src -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JSLE_X: dst = src -> always taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- },
- {
- "JMP_JNE_X: dst = src -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JNE, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JGT_X: dst = src -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JGT, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JLT_X: dst = src -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JLT, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JSGT_X: dst = src -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- {
- "JMP_JSLT_X: dst = src -> never taken",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 1),
- BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 0 } },
- },
- /* Short relative jumps */
- {
- "Short relative jump: offset=0",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- },
- INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
- { },
- { { 0, 0 } },
- },
- {
- "Short relative jump: offset=1",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- },
- INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
- { },
- { { 0, 0 } },
- },
- {
- "Short relative jump: offset=2",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- },
- INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
- { },
- { { 0, 0 } },
- },
- {
- "Short relative jump: offset=3",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- },
- INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
- { },
- { { 0, 0 } },
- },
- {
- "Short relative jump: offset=4",
- .u.insns_int = {
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_ALU32_IMM(BPF_ADD, R0, 1),
- BPF_EXIT_INSN(),
- BPF_ALU32_IMM(BPF_MOV, R0, -1),
- },
- INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
- { },
- { { 0, 0 } },
- },
- /* Conditional branch conversions */
- {
- "Long conditional jump: taken at runtime",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_max_jmp_taken,
- },
- {
- "Long conditional jump: not taken at runtime",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 2 } },
- .fill_helper = bpf_fill_max_jmp_not_taken,
- },
- {
- "Long conditional jump: always taken, known at JIT time",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 1 } },
- .fill_helper = bpf_fill_max_jmp_always_taken,
- },
- {
- "Long conditional jump: never taken, known at JIT time",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, 2 } },
- .fill_helper = bpf_fill_max_jmp_never_taken,
- },
- /* Staggered jump sequences, immediate */
- {
- "Staggered jumps: JMP_JA",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_ja,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JEQ_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jeq_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JNE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jne_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSET_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jset_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JGT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jgt_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JGE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jge_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JLT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jlt_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JLE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jle_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSGT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsgt_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSGE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsge_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSLT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jslt_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSLE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsle_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- /* Staggered jump sequences, register */
- {
- "Staggered jumps: JMP_JEQ_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jeq_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JNE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jne_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSET_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jset_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JGT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jgt_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JGE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jge_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JLT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jlt_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JLE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jle_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSGT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsgt_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSGE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsge_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSLT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jslt_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP_JSLE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsle_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- /* Staggered jump sequences, JMP32 immediate */
- {
- "Staggered jumps: JMP32_JEQ_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jeq32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JNE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jne32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSET_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jset32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JGT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jgt32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JGE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jge32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JLT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jlt32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JLE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jle32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSGT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsgt32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSGE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsge32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSLT_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jslt32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSLE_K",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsle32_imm,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- /* Staggered jump sequences, JMP32 register */
- {
- "Staggered jumps: JMP32_JEQ_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jeq32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JNE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jne32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSET_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jset32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JGT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jgt32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JGE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jge32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JLT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jlt32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JLE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jle32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSGT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsgt32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSGE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsge32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSLT_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jslt32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- {
- "Staggered jumps: JMP32_JSLE_X",
- { },
- INTERNAL | FLAG_NO_DATA,
- { },
- { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
- .fill_helper = bpf_fill_staggered_jsle32_reg,
- .nr_testruns = NR_STAGGERED_JMP_RUNS,
- },
- };
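- /* Dummy net_device backing the skbs built by populate_skb() below. */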
- static struct net_device dev;
- static struct sk_buff *populate_skb(char *buf, int size)
- {
- struct sk_buff *skb;
- if (size >= MAX_DATA)
- return NULL;
- skb = alloc_skb(MAX_DATA, GFP_KERNEL);
- if (!skb)
- return NULL;
- __skb_put_data(skb, buf, size);
- /* Initialize a fake skb with test pattern. */
- skb_reset_mac_header(skb);
- skb->protocol = htons(ETH_P_IP);
- skb->pkt_type = SKB_TYPE;
- skb->mark = SKB_MARK;
- skb->hash = SKB_HASH;
- skb->queue_mapping = SKB_QUEUE_MAP;
- skb->vlan_tci = SKB_VLAN_TCI;
- skb->vlan_present = SKB_VLAN_PRESENT;
- skb->vlan_proto = htons(ETH_P_IP);
- dev_net_set(&dev, &init_net);
- skb->dev = &dev;
- skb->dev->ifindex = SKB_DEV_IFINDEX;
- skb->dev->type = SKB_DEV_TYPE;
- skb_set_network_header(skb, min(size, ETH_HLEN));
- return skb;
- }
- static void *generate_test_data(struct bpf_test *test, int sub)
- {
- struct sk_buff *skb;
- struct page *page;
- if (test->aux & FLAG_NO_DATA)
- return NULL;
- if (test->aux & FLAG_LARGE_MEM)
- return kmalloc(test->test[sub].data_size, GFP_KERNEL);
- /* Test case expects an skb, so populate one. Various
- * subtests generate skbs of different sizes based on
- * the same data.
- */
- skb = populate_skb(test->data, test->test[sub].data_size);
- if (!skb)
- return NULL;
- if (test->aux & FLAG_SKB_FRAG) {
- /*
- * when the test requires a fragmented skb, add a
- * single fragment to the skb, filled with
- * test->frag_data.
- */
- void *ptr;
- page = alloc_page(GFP_KERNEL);
- if (!page)
- goto err_kfree_skb;
- ptr = kmap(page);
- if (!ptr)
- goto err_free_page;
- memcpy(ptr, test->frag_data, MAX_DATA);
- kunmap(page);
- skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
- }
- return skb;
- err_free_page:
- __free_page(page);
- err_kfree_skb:
- kfree_skb(skb);
- return NULL;
- }
- static void release_test_data(const struct bpf_test *test, void *data)
- {
- if (test->aux & FLAG_NO_DATA)
- return;
- if (test->aux & FLAG_LARGE_MEM)
- kfree(data);
- else
- kfree_skb(data);
- }
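- /* Length of a classic filter: use the fill helper's recorded length when
-  * present, otherwise scan backwards for the last non-zero instruction in
-  * the fixed-size insns array.
-  */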
- static int filter_length(int which)
- {
- struct sock_filter *fp;
- int len;
- if (tests[which].fill_helper)
- return tests[which].u.ptr.len;
- fp = tests[which].u.insns;
- for (len = MAX_INSNS - 1; len > 0; --len)
- if (fp[len].code != 0 || fp[len].k != 0)
- break;
- return len + 1;
- }
- static void *filter_pointer(int which)
- {
- if (tests[which].fill_helper)
- return tests[which].u.ptr.insns;
- else
- return tests[which].u.insns;
- }
- static struct bpf_prog *generate_filter(int which, int *err)
- {
- __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
- unsigned int flen = filter_length(which);
- void *fptr = filter_pointer(which);
- struct sock_fprog_kern fprog;
- struct bpf_prog *fp;
- switch (test_type) {
- case CLASSIC:
- fprog.filter = fptr;
- fprog.len = flen;
- *err = bpf_prog_create(&fp, &fprog);
- if (tests[which].aux & FLAG_EXPECTED_FAIL) {
- if (*err == tests[which].expected_errcode) {
- pr_cont("PASS\n");
- /* Verifier rejected filter as expected. */
- *err = 0;
- return NULL;
- } else {
- pr_cont("UNEXPECTED_PASS\n");
- /* Verifier didn't reject the filter; that is
- * bad enough, just return!
- */
- *err = -EINVAL;
- return NULL;
- }
- }
- if (*err) {
- pr_cont("FAIL to prog_create err=%d len=%d\n",
- *err, fprog.len);
- return NULL;
- }
- break;
- case INTERNAL:
- fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
- if (fp == NULL) {
- pr_cont("UNEXPECTED_FAIL no memory left\n");
- *err = -ENOMEM;
- return NULL;
- }
- fp->len = flen;
- /* Type doesn't really matter here as long as it's not unspec. */
- fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
- memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
- fp->aux->stack_depth = tests[which].stack_depth;
- fp->aux->verifier_zext = !!(tests[which].aux &
- FLAG_VERIFIER_ZEXT);
- /* We cannot error here as we don't need type compatibility
- * checks.
- */
- fp = bpf_prog_select_runtime(fp, err);
- if (*err) {
- pr_cont("FAIL to select_runtime err=%d\n", *err);
- return NULL;
- }
- break;
- }
- *err = 0;
- return fp;
- }
- static void release_filter(struct bpf_prog *fp, int which)
- {
- __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
- switch (test_type) {
- case CLASSIC:
- bpf_prog_destroy(fp);
- break;
- case INTERNAL:
- bpf_prog_free(fp);
- break;
- }
- }
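- /* Execute the program 'runs' times on the same data with migration
-  * disabled and return the last result; *duration is the average time
-  * per run in nanoseconds.
-  */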
- static int __run_one(const struct bpf_prog *fp, const void *data,
- int runs, u64 *duration)
- {
- u64 start, finish;
- int ret = 0, i;
- migrate_disable();
- start = ktime_get_ns();
- for (i = 0; i < runs; i++)
- ret = bpf_prog_run(fp, data);
- finish = ktime_get_ns();
- migrate_enable();
- *duration = finish - start;
- do_div(*duration, runs);
- return ret;
- }
- static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
- {
- int err_cnt = 0, i, runs = MAX_TESTRUNS;
- if (test->nr_testruns)
- runs = min(test->nr_testruns, MAX_TESTRUNS);
- for (i = 0; i < MAX_SUBTESTS; i++) {
- void *data;
- u64 duration;
- u32 ret;
- /*
- * NOTE: Several sub-tests may be present, in which case
- * a zero {data_size, result} tuple indicates the end of
- * the sub-test array. The first test is always run,
- * even if both data_size and result happen to be zero.
- */
- if (i > 0 &&
- test->test[i].data_size == 0 &&
- test->test[i].result == 0)
- break;
- data = generate_test_data(test, i);
- if (!data && !(test->aux & FLAG_NO_DATA)) {
- pr_cont("data generation failed ");
- err_cnt++;
- break;
- }
- ret = __run_one(fp, data, runs, &duration);
- release_test_data(test, data);
- if (ret == test->test[i].result) {
- pr_cont("%lld ", duration);
- } else {
- pr_cont("ret %d != %d ", ret,
- test->test[i].result);
- err_cnt++;
- }
- }
- return err_cnt;
- }
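- /* Module parameters restricting the run to a single test or a range. */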
- static char test_name[64];
- module_param_string(test_name, test_name, sizeof(test_name), 0);
- static int test_id = -1;
- module_param(test_id, int, 0);
- static int test_range[2] = { 0, INT_MAX };
- module_param_array(test_range, int, NULL, 0);
- static bool exclude_test(int test_id)
- {
- return test_id < test_range[0] || test_id > test_range[1];
- }
- static __init struct sk_buff *build_test_skb(void)
- {
- u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
- struct sk_buff *skb[2];
- struct page *page[2];
- int i, data_size = 8;
- for (i = 0; i < 2; i++) {
- page[i] = alloc_page(GFP_KERNEL);
- if (!page[i]) {
- if (i == 0)
- goto err_page0;
- else
- goto err_page1;
- }
- /* this will set skb[i]->head_frag */
- skb[i] = dev_alloc_skb(headroom + data_size);
- if (!skb[i]) {
- if (i == 0)
- goto err_skb0;
- else
- goto err_skb1;
- }
- skb_reserve(skb[i], headroom);
- skb_put(skb[i], data_size);
- skb[i]->protocol = htons(ETH_P_IP);
- skb_reset_network_header(skb[i]);
- skb_set_mac_header(skb[i], -ETH_HLEN);
- skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
- // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
- }
- /* setup shinfo */
- skb_shinfo(skb[0])->gso_size = 1448;
- skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
- skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
- skb_shinfo(skb[0])->gso_segs = 0;
- skb_shinfo(skb[0])->frag_list = skb[1];
- skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
- /* adjust skb[0]'s len */
- skb[0]->len += skb[1]->len;
- skb[0]->data_len += skb[1]->data_len;
- skb[0]->truesize += skb[1]->truesize;
- return skb[0];
- err_skb1:
- __free_page(page[1]);
- err_page1:
- kfree_skb(skb[0]);
- err_skb0:
- __free_page(page[0]);
- err_page0:
- return NULL;
- }
- static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
- {
- unsigned int alloc_size = 2000;
- unsigned int headroom = 102, doffset = 72, data_size = 1308;
- struct sk_buff *skb[2];
- int i;
- /* skbs linked in a frag_list, both with linear data, with head_frag=0
- * (data allocated by kmalloc), both have tcp data of 1308 bytes
- * (total payload is 2616 bytes).
- * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
- */
- for (i = 0; i < 2; i++) {
- skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
- if (!skb[i]) {
- if (i == 0)
- goto err_skb0;
- else
- goto err_skb1;
- }
- skb[i]->protocol = htons(ETH_P_IPV6);
- skb_reserve(skb[i], headroom);
- skb_put(skb[i], doffset + data_size);
- skb_reset_network_header(skb[i]);
- if (i == 0)
- skb_reset_mac_header(skb[i]);
- else
- skb_set_mac_header(skb[i], -ETH_HLEN);
- __skb_pull(skb[i], doffset);
- }
- /* setup shinfo.
- * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
- * reduced gso_size.
- */
- skb_shinfo(skb[0])->gso_size = 1288;
- skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
- skb_shinfo(skb[0])->gso_segs = 0;
- skb_shinfo(skb[0])->frag_list = skb[1];
- /* adjust skb[0]'s len */
- skb[0]->len += skb[1]->len;
- skb[0]->data_len += skb[1]->len;
- skb[0]->truesize += skb[1]->truesize;
- return skb[0];
- err_skb1:
- kfree_skb(skb[0]);
- err_skb0:
- return NULL;
- }
- struct skb_segment_test {
- const char *descr;
- struct sk_buff *(*build_skb)(void);
- netdev_features_t features;
- };
- static struct skb_segment_test skb_segment_tests[] __initconst = {
- {
- .descr = "gso_with_rx_frags",
- .build_skb = build_test_skb,
- .features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
- NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
- },
- {
- .descr = "gso_linear_no_head_frag",
- .build_skb = build_test_skb_linear_no_head_frag,
- .features = NETIF_F_SG | NETIF_F_FRAGLIST |
- NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
- NETIF_F_LLTX | NETIF_F_GRO |
- NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
- NETIF_F_HW_VLAN_STAG_TX
- }
- };
- static __init int test_skb_segment_single(const struct skb_segment_test *test)
- {
- struct sk_buff *skb, *segs;
- int ret = -1;
- skb = test->build_skb();
- if (!skb) {
- pr_info("%s: failed to build_test_skb", __func__);
- goto done;
- }
- segs = skb_segment(skb, test->features);
- if (!IS_ERR(segs)) {
- kfree_skb_list(segs);
- ret = 0;
- }
- kfree_skb(skb);
- done:
- return ret;
- }
- static __init int test_skb_segment(void)
- {
- int i, err_cnt = 0, pass_cnt = 0;
- for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
- const struct skb_segment_test *test = &skb_segment_tests[i];
- cond_resched();
- if (exclude_test(i))
- continue;
- pr_info("#%d %s ", i, test->descr);
- if (test_skb_segment_single(test)) {
- pr_cont("FAIL\n");
- err_cnt++;
- } else {
- pr_cont("PASS\n");
- pass_cnt++;
- }
- }
- pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
- pass_cnt, err_cnt);
- return err_cnt ? -EINVAL : 0;
- }
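- /* Run every entry in tests[]: invoke the fill helper if present, build
-  * the classic or internal program, run all sub-tests and print a
-  * per-test verdict plus a JIT summary.
-  */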
- static __init int test_bpf(void)
- {
- int i, err_cnt = 0, pass_cnt = 0;
- int jit_cnt = 0, run_cnt = 0;
- for (i = 0; i < ARRAY_SIZE(tests); i++) {
- struct bpf_prog *fp;
- int err;
- cond_resched();
- if (exclude_test(i))
- continue;
- pr_info("#%d %s ", i, tests[i].descr);
- if (tests[i].fill_helper &&
- tests[i].fill_helper(&tests[i]) < 0) {
- pr_cont("FAIL to prog_fill\n");
- continue;
- }
- fp = generate_filter(i, &err);
- if (tests[i].fill_helper) {
- kfree(tests[i].u.ptr.insns);
- tests[i].u.ptr.insns = NULL;
- }
- if (fp == NULL) {
- if (err == 0) {
- pass_cnt++;
- continue;
- }
- err_cnt++;
- continue;
- }
- pr_cont("jited:%u ", fp->jited);
- run_cnt++;
- if (fp->jited)
- jit_cnt++;
- err = run_one(fp, &tests[i]);
- release_filter(fp, i);
- if (err) {
- pr_cont("FAIL (%d times)\n", err);
- err_cnt++;
- } else {
- pr_cont("PASS\n");
- pass_cnt++;
- }
- }
- pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
- pass_cnt, err_cnt, jit_cnt, run_cnt);
- return err_cnt ? -EINVAL : 0;
- }
- struct tail_call_test {
- const char *descr;
- struct bpf_insn insns[MAX_INSNS];
- int flags;
- int result;
- int stack_depth;
- };
- /* Flags that can be passed to tail call test cases */
- #define FLAG_NEED_STATE BIT(0)
- #define FLAG_RESULT_IN_STATE BIT(1)
- /*
- * Magic marker used in test snippets for tail calls below.
- * BPF_LD to R2 and BPF_MOV to R3 with this immediate value are replaced
- * with the proper values by the test runner.
- */
- #define TAIL_CALL_MARKER 0x7a11ca11
- /* Special offset to indicate a NULL call target */
- #define TAIL_CALL_NULL 0x7fff
- /* Special offset to indicate an out-of-range index */
- #define TAIL_CALL_INVALID 0x7ffe
- #define TAIL_CALL(offset) \
- BPF_LD_IMM64(R2, TAIL_CALL_MARKER), \
- BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
- offset, TAIL_CALL_MARKER), \
- BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
- /*
- * A test function to be called from a BPF program, clobbering a lot of
- * CPU registers in the process. A JITed BPF program calling this function
- * must save and restore any caller-saved registers it uses for internal
- * state, for example the current tail call count.
- */
- BPF_CALL_1(bpf_test_func, u64, arg)
- {
- char buf[64];
- long a = 0;
- long b = 1;
- long c = 2;
- long d = 3;
- long e = 4;
- long f = 5;
- long g = 6;
- long h = 7;
- return snprintf(buf, sizeof(buf),
- "%ld %lu %lx %ld %lu %lx %ld %lu %x",
- a, b, c, d, e, f, g, h, (int)arg);
- }
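- /* Give the test function a helper ID one past the last real BPF helper,
-  * so the relocation code below can tell it apart from genuine helpers.
-  */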
- #define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
- /*
- * Tail call tests. Each test case may call any other test in the table,
- * including itself, specified as a relative index offset from the calling
- * test. The index TAIL_CALL_NULL can be used to specify a NULL target
- * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
- * results in a target index that is out of range.
- */
- static struct tail_call_test tail_call_tests[] = {
- {
- "Tail call leaf",
- .insns = {
- BPF_ALU64_REG(BPF_MOV, R0, R1),
- BPF_ALU64_IMM(BPF_ADD, R0, 1),
- BPF_EXIT_INSN(),
- },
- .result = 1,
- },
- {
- "Tail call 2",
- .insns = {
- BPF_ALU64_IMM(BPF_ADD, R1, 2),
- TAIL_CALL(-1),
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_EXIT_INSN(),
- },
- .result = 3,
- },
- {
- "Tail call 3",
- .insns = {
- BPF_ALU64_IMM(BPF_ADD, R1, 3),
- TAIL_CALL(-1),
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_EXIT_INSN(),
- },
- .result = 6,
- },
- {
- "Tail call 4",
- .insns = {
- BPF_ALU64_IMM(BPF_ADD, R1, 4),
- TAIL_CALL(-1),
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_EXIT_INSN(),
- },
- .result = 10,
- },
- {
- "Tail call load/store leaf",
- .insns = {
- BPF_ALU64_IMM(BPF_MOV, R1, 1),
- BPF_ALU64_IMM(BPF_MOV, R2, 2),
- BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
- BPF_STX_MEM(BPF_DW, R3, R1, -8),
- BPF_STX_MEM(BPF_DW, R3, R2, -16),
- BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
- BPF_JMP_REG(BPF_JNE, R0, R1, 3),
- BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
- BPF_JMP_REG(BPF_JNE, R0, R2, 1),
- BPF_ALU64_IMM(BPF_MOV, R0, 0),
- BPF_EXIT_INSN(),
- },
- .result = 0,
- .stack_depth = 32,
- },
- {
- "Tail call load/store",
- .insns = {
- BPF_ALU64_IMM(BPF_MOV, R0, 3),
- BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
- TAIL_CALL(-1),
- BPF_ALU64_IMM(BPF_MOV, R0, -1),
- BPF_EXIT_INSN(),
- },
- .result = 0,
- .stack_depth = 16,
- },
- {
- "Tail call error path, max count reached",
- .insns = {
- BPF_LDX_MEM(BPF_W, R2, R1, 0),
- BPF_ALU64_IMM(BPF_ADD, R2, 1),
- BPF_STX_MEM(BPF_W, R1, R2, 0),
- TAIL_CALL(0),
- BPF_EXIT_INSN(),
- },
- .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
- .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
- },
- {
- "Tail call count preserved across function calls",
- .insns = {
- BPF_LDX_MEM(BPF_W, R2, R1, 0),
- BPF_ALU64_IMM(BPF_ADD, R2, 1),
- BPF_STX_MEM(BPF_W, R1, R2, 0),
- BPF_STX_MEM(BPF_DW, R10, R1, -8),
- BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
- BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
- BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
- BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
- BPF_CALL_REL(BPF_FUNC_jiffies64),
- BPF_CALL_REL(BPF_FUNC_test_func),
- BPF_LDX_MEM(BPF_DW, R1, R10, -8),
- BPF_ALU32_REG(BPF_MOV, R0, R1),
- TAIL_CALL(0),
- BPF_EXIT_INSN(),
- },
- .stack_depth = 8,
- .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
- .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
- },
- {
- "Tail call error path, NULL target",
- .insns = {
- BPF_LDX_MEM(BPF_W, R2, R1, 0),
- BPF_ALU64_IMM(BPF_ADD, R2, 1),
- BPF_STX_MEM(BPF_W, R1, R2, 0),
- TAIL_CALL(TAIL_CALL_NULL),
- BPF_EXIT_INSN(),
- },
- .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
- .result = MAX_TESTRUNS,
- },
- {
- "Tail call error path, index out of range",
- .insns = {
- BPF_LDX_MEM(BPF_W, R2, R1, 0),
- BPF_ALU64_IMM(BPF_ADD, R2, 1),
- BPF_STX_MEM(BPF_W, R1, R2, 0),
- TAIL_CALL(TAIL_CALL_INVALID),
- BPF_EXIT_INSN(),
- },
- .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
- .result = MAX_TESTRUNS,
- },
- };
- static void __init destroy_tail_call_tests(struct bpf_array *progs)
- {
- int i;
- for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
- if (progs->ptrs[i])
- bpf_prog_free(progs->ptrs[i]);
- kfree(progs);
- }
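- /* Build a bpf_array of JITed programs, one per tail call test, resolving
-  * each TAIL_CALL_MARKER placeholder to the array address or the target
-  * program index before selecting a runtime.
-  */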
- static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
- {
- int ntests = ARRAY_SIZE(tail_call_tests);
- struct bpf_array *progs;
- int which, err;
- /* Allocate the table of programs to be used for tail calls */
- progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
- GFP_KERNEL);
- if (!progs)
- goto out_nomem;
- /* Create all eBPF programs and populate the table */
- for (which = 0; which < ntests; which++) {
- struct tail_call_test *test = &tail_call_tests[which];
- struct bpf_prog *fp;
- int len, i;
- /* Compute the number of program instructions; a BPF_LD_IMM64
- * occupies two instruction slots.
- */
- for (len = 0; len < MAX_INSNS; len++) {
- struct bpf_insn *insn = &test->insns[len];
- if (len < MAX_INSNS - 1 &&
- insn->code == (BPF_LD | BPF_DW | BPF_IMM))
- len++;
- if (insn->code == 0)
- break;
- }
- /* Allocate and initialize the program */
- fp = bpf_prog_alloc(bpf_prog_size(len), 0);
- if (!fp)
- goto out_nomem;
- fp->len = len;
- fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
- fp->aux->stack_depth = test->stack_depth;
- memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
- /* Relocate runtime tail call offsets and addresses */
- for (i = 0; i < len; i++) {
- struct bpf_insn *insn = &fp->insnsi[i];
- long addr = 0;
- switch (insn->code) {
- case BPF_LD | BPF_DW | BPF_IMM:
- if (insn->imm != TAIL_CALL_MARKER)
- break;
- insn[0].imm = (u32)(long)progs;
- insn[1].imm = ((u64)(long)progs) >> 32;
- break;
- case BPF_ALU | BPF_MOV | BPF_K:
- if (insn->imm != TAIL_CALL_MARKER)
- break;
- if (insn->off == TAIL_CALL_NULL)
- insn->imm = ntests;
- else if (insn->off == TAIL_CALL_INVALID)
- insn->imm = ntests + 1;
- else
- insn->imm = which + insn->off;
- insn->off = 0;
- break;
- case BPF_JMP | BPF_CALL:
- if (insn->src_reg != BPF_PSEUDO_CALL)
- break;
- switch (insn->imm) {
- case BPF_FUNC_get_numa_node_id:
- addr = (long)&numa_node_id;
- break;
- case BPF_FUNC_ktime_get_ns:
- addr = (long)&ktime_get_ns;
- break;
- case BPF_FUNC_ktime_get_boot_ns:
- addr = (long)&ktime_get_boot_fast_ns;
- break;
- case BPF_FUNC_ktime_get_coarse_ns:
- addr = (long)&ktime_get_coarse_ns;
- break;
- case BPF_FUNC_jiffies64:
- addr = (long)&get_jiffies_64;
- break;
- case BPF_FUNC_test_func:
- addr = (long)&bpf_test_func;
- break;
- default:
- err = -EFAULT;
- goto out_err;
- }
- *insn = BPF_EMIT_CALL(addr);
- if ((long)__bpf_call_base + insn->imm != addr)
- *insn = BPF_JMP_A(0); /* Skip: NOP */
- break;
- }
- }
- fp = bpf_prog_select_runtime(fp, &err);
- if (err)
- goto out_err;
- progs->ptrs[which] = fp;
- }
- /* The last entry contains a NULL program pointer */
- progs->map.max_entries = ntests + 1;
- *pprogs = progs;
- return 0;
- out_nomem:
- err = -ENOMEM;
- out_err:
- if (progs)
- destroy_tail_call_tests(progs);
- return err;
- }
- static __init int test_tail_calls(struct bpf_array *progs)
- {
- int i, err_cnt = 0, pass_cnt = 0;
- int jit_cnt = 0, run_cnt = 0;
- for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
- struct tail_call_test *test = &tail_call_tests[i];
- struct bpf_prog *fp = progs->ptrs[i];
- int *data = NULL;
- int state = 0;
- u64 duration;
- int ret;
- cond_resched();
- if (exclude_test(i))
- continue;
- pr_info("#%d %s ", i, test->descr);
- if (!fp) {
- err_cnt++;
- continue;
- }
- pr_cont("jited:%u ", fp->jited);
- run_cnt++;
- if (fp->jited)
- jit_cnt++;
- if (test->flags & FLAG_NEED_STATE)
- data = &state;
- ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
- if (test->flags & FLAG_RESULT_IN_STATE)
- ret = state;
- if (ret == test->result) {
- pr_cont("%lld PASS", duration);
- pass_cnt++;
- } else {
- pr_cont("ret %d != %d FAIL", ret, test->result);
- err_cnt++;
- }
- }
- pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
- __func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
- return err_cnt ? -EINVAL : 0;
- }
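- /* Optional test_suite parameter: "test_bpf", "test_tail_calls" or
-  * "test_skb_segment"; an empty value runs all three suites.
-  */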
- static char test_suite[32];
- module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
- static __init int find_test_index(const char *test_name)
- {
- int i;
- if (!strcmp(test_suite, "test_bpf")) {
- for (i = 0; i < ARRAY_SIZE(tests); i++) {
- if (!strcmp(tests[i].descr, test_name))
- return i;
- }
- }
- if (!strcmp(test_suite, "test_tail_calls")) {
- for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
- if (!strcmp(tail_call_tests[i].descr, test_name))
- return i;
- }
- }
- if (!strcmp(test_suite, "test_skb_segment")) {
- for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
- if (!strcmp(skb_segment_tests[i].descr, test_name))
- return i;
- }
- }
- return -1;
- }
- static __init int prepare_test_range(void)
- {
- int valid_range;
- if (!strcmp(test_suite, "test_bpf"))
- valid_range = ARRAY_SIZE(tests);
- else if (!strcmp(test_suite, "test_tail_calls"))
- valid_range = ARRAY_SIZE(tail_call_tests);
- else if (!strcmp(test_suite, "test_skb_segment"))
- valid_range = ARRAY_SIZE(skb_segment_tests);
- else
- return 0;
- if (test_id >= 0) {
- /*
- * if a test_id was specified, use test_range to
- * cover only that test.
- */
- if (test_id >= valid_range) {
- pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
- test_suite);
- return -EINVAL;
- }
- test_range[0] = test_id;
- test_range[1] = test_id;
- } else if (*test_name) {
- /*
- * if a test_name was specified, find it and setup
- * test_range to cover only that test.
- */
- int idx = find_test_index(test_name);
- if (idx < 0) {
- pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
- test_name, test_suite);
- return -EINVAL;
- }
- test_range[0] = idx;
- test_range[1] = idx;
- } else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
- /*
- * check that the supplied test_range is valid.
- */
- if (test_range[0] < 0 || test_range[1] >= valid_range) {
- pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
- test_suite);
- return -EINVAL;
- }
- if (test_range[1] < test_range[0]) {
- pr_err("test_bpf: test_range is ending before it starts.\n");
- return -EINVAL;
- }
- }
- return 0;
- }
- static int __init test_bpf_init(void)
- {
- struct bpf_array *progs = NULL;
- int ret;
- if (strlen(test_suite) &&
- strcmp(test_suite, "test_bpf") &&
- strcmp(test_suite, "test_tail_calls") &&
- strcmp(test_suite, "test_skb_segment")) {
- pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
- return -EINVAL;
- }
- /*
- * if test_suite is not specified, but test_id, test_name or test_range
- * is specified, set 'test_bpf' as the default test suite.
- */
- if (!strlen(test_suite) &&
- (test_id != -1 || strlen(test_name) ||
- (test_range[0] != 0 || test_range[1] != INT_MAX))) {
- pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
- strscpy(test_suite, "test_bpf", sizeof(test_suite));
- }
- ret = prepare_test_range();
- if (ret < 0)
- return ret;
- if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
- ret = test_bpf();
- if (ret)
- return ret;
- }
- if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
- ret = prepare_tail_call_tests(&progs);
- if (ret)
- return ret;
- ret = test_tail_calls(progs);
- destroy_tail_call_tests(progs);
- if (ret)
- return ret;
- }
- if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
- return test_skb_segment();
- return 0;
- }
- static void __exit test_bpf_exit(void)
- {
- }
- module_init(test_bpf_init);
- module_exit(test_bpf_exit);
- MODULE_LICENSE("GPL");
|