subreg.c

/* This file contains sub-register zero extension checks for insns defining
 * sub-registers, meaning:
 *   - All insns under the BPF_ALU class. Their BPF_ALU32 variants or narrow
 *     width forms (BPF_END) could define sub-registers.
 *   - Narrow direct loads, BPF_B/H/W | BPF_LDX.
 *   - BPF_LD is not exposed to JIT back-ends, so there is no need to test it.
 *
 * "get_prandom_u32" is used to initialize the low 32 bits of some registers,
 * to prevent the verifier or JIT back-ends from folding a register back into
 * a constant when range info shows it is constant.
 */
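/* Most ALU32 tests below follow the same pattern: the low 32 bits of the
 * destination register are randomized via get_prandom_u32(), the upper 32
 * bits are polluted with a constant at or above 1ULL << 32, and then the
 * sub-register defining insn under test runs.  Such insns are expected to
 * zero the upper 32 bits, so shifting the result right by 32 with
 * BPF_ALU64_IMM(BPF_RSH, ..., 32) before BPF_EXIT_INSN() should leave 0,
 * which ".retval = 0" checks.
 */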
{
	"add32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"add32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	/* An insn could have no effect on the low 32 bits, for example:
	 *   a = a + 0
	 *   a = a | 0
	 *   a = a & -1
	 * But they should still zero the high 32 bits.
	 */
	BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, -2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
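/* The "imm" variants run the 32-bit op twice with different immediates,
 * keep the shifted-out high half of the first result in R6, and OR it into
 * the second result, so a missing zero extension in either run makes the
 * test return non-zero.
 */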
{
	"sub32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x1ffffffffULL),
	BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"sub32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mul32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000001ULL),
	BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mul32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, -1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"div32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_MOV64_IMM(BPF_REG_0, -1),
	BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"div32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"or32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000001ULL),
	BPF_ALU32_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"or32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"and32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x1ffffffffULL),
	BPF_ALU32_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"and32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_AND, BPF_REG_0, -1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_AND, BPF_REG_0, -2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"lsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"lsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"rsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"rsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"neg32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_NEG, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mod32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_MOV64_IMM(BPF_REG_0, -1),
	BPF_ALU32_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mod32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"xor32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_ALU32_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"xor32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_XOR, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mov32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_MOV32_REG(BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mov32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV32_IMM(BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV32_IMM(BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"arsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_ARSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"arsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ARSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ARSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
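/* The BPF_END tests below pollute the upper 32 bits differently: one
 * get_prandom_u32() result is shifted left by 32 and OR-ed into a second
 * random value, then BPF_TO_LE/BPF_TO_BE is applied at 16- and 32-bit
 * widths.  These narrow-width conversions are also expected to zero the
 * upper 32 bits, so the same shift-right-by-32 check applies.
 */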
{
	"end16 (to_le) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_LE, BPF_REG_0, 16),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end32 (to_le) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_LE, BPF_REG_0, 32),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end16 (to_be) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_BE, BPF_REG_0, 16),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end32 (to_be) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_BE, BPF_REG_0, 32),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
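/* The narrow load tests below store 0xfaceb00c into a 32-bit stack slot,
 * pollute the upper 32 bits of the destination register, then load the
 * slot back with BPF_B/BPF_H/BPF_W via BPF_LDX.  Narrow loads are also
 * expected to zero-extend, so the shifted-out upper half should again be 0.
 */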
{
	"ldx_b zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"ldx_h zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_H, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"ldx_w zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},