/* Two map-value bytes are each bounds-checked to [0, 0xff], but their
 * difference may be negative; after RSH 56 the result can be huge, so
 * the verifier must reject the dependent array access.
 * NOTE(review): the struct's braces/.insns/.result lines are elided in
 * this view of the file.
 */
2 "subtraction bounds (map value) variant 1",
/* standard map-lookup preamble: key = 0 on the stack, r0 = lookup(map, &key) */
4 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
5 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
6 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
7 BPF_LD_MAP_FD(BPF_REG_1, 0),
8 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
/* skip the body when the lookup returned NULL */
9 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
/* r1 = [0x00, 0xff] after this load + JGT check */
10 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
11 BPF_JMP_IMM(BPF_JGT, BPF_REG_1, 0xff, 7),
/* r3 = [0x00, 0xff] likewise */
12 BPF_LDX_MEM(BPF_B, BPF_REG_3, BPF_REG_0, 1),
13 BPF_JMP_IMM(BPF_JGT, BPF_REG_3, 0xff, 5),
/* r1 - r3 can be negative even though both operands are in [0, 0xff] */
14 BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_3),
15 BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 56),
/* pointer arithmetic with a value that may exceed the map value size */
16 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
17 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
19 BPF_MOV64_IMM(BPF_REG_0, 0),
22 .fixup_map_hash_8b = { 3 },
23 .errstr = "R0 max value is outside of the array range",
/* Like variant 1 but without the RSH: the possibly-negative difference
 * is added to the map-value pointer directly, so the verifier must
 * complain about a negative minimum value instead.
 */
27 "subtraction bounds (map value) variant 2",
/* map-lookup preamble: key = 0 on the stack, r0 = lookup(map, &key) */
29 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
30 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
31 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
32 BPF_LD_MAP_FD(BPF_REG_1, 0),
33 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
34 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
/* r1, r3 each bounds-checked to [0x00, 0xff] */
35 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
36 BPF_JMP_IMM(BPF_JGT, BPF_REG_1, 0xff, 6),
37 BPF_LDX_MEM(BPF_B, BPF_REG_3, BPF_REG_0, 1),
38 BPF_JMP_IMM(BPF_JGT, BPF_REG_3, 0xff, 4),
/* difference may be negative; added to the pointer with no masking */
39 BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_3),
40 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
41 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
43 BPF_MOV64_IMM(BPF_REG_0, 0),
46 .fixup_map_hash_8b = { 3 },
47 .errstr = "R0 min value is negative, either use unsigned index or do a if (index >=0) check.",
48 .errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
/* Computes fp - map_value_ptr into r9 (pointer-minus-pointer) and then
 * stores r9 into a map value.  Privileged programs may do this, but
 * unprivileged ones must be rejected: the stored difference would leak
 * kernel pointer information.
 */
52 "check subtraction on pointers for unpriv",
54 BPF_MOV64_IMM(BPF_REG_0, 0),
/* first lookup with key 9 — result only used as the subtrahend below */
55 BPF_LD_MAP_FD(BPF_REG_ARG1, 0),
56 BPF_MOV64_REG(BPF_REG_ARG2, BPF_REG_FP),
57 BPF_ALU64_IMM(BPF_ADD, BPF_REG_ARG2, -8),
58 BPF_ST_MEM(BPF_DW, BPF_REG_ARG2, 0, 9),
59 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
/* r9 = frame pointer - map value pointer (or - NULL) */
60 BPF_MOV64_REG(BPF_REG_9, BPF_REG_FP),
61 BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_0),
/* second lookup with key 0 — destination for the leaked difference */
62 BPF_LD_MAP_FD(BPF_REG_ARG1, 0),
63 BPF_MOV64_REG(BPF_REG_ARG2, BPF_REG_FP),
64 BPF_ALU64_IMM(BPF_ADD, BPF_REG_ARG2, -8),
65 BPF_ST_MEM(BPF_DW, BPF_REG_ARG2, 0, 0),
66 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
67 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
/* store the pointer difference into the map value */
69 BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
70 BPF_MOV64_IMM(BPF_REG_0, 0),
/* both BPF_LD_MAP_FD instructions get the map fd patched in */
73 .fixup_map_hash_8b = { 1, 9 },
75 .result_unpriv = REJECT,
76 .errstr_unpriv = "R9 pointer -= pointer prohibited",
/* MOV32 zero-extends its immediate, so r2 = 0xffff'ffff (upper 32 bits
 * clear); RSH 32 then yields 0 and the offset-0 access is in bounds.
 * Verifies the tracker models MOV32 zero-extension (no .errstr present
 * in this view).
 */
79 "bounds check based on zero-extended MOV",
/* map-lookup preamble */
81 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
82 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
83 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
84 BPF_LD_MAP_FD(BPF_REG_1, 0),
85 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
86 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
87 /* r2 = 0x0000'0000'ffff'ffff */
88 BPF_MOV32_IMM(BPF_REG_2, 0xffffffff),
/* r2 = 0 after shifting out the low 32 bits */
90 BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 32),
92 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_2),
93 /* access at offset 0 */
94 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
96 BPF_MOV64_IMM(BPF_REG_0, 0),
99 .fixup_map_hash_8b = { 3 },
/* MOV64 sign-extends its 32-bit immediate: 0xffffffff becomes all-ones,
 * RSH 32 leaves 0xffff'ffff, and adding that to the map pointer must be
 * rejected as out of bounds.
 */
103 "bounds check based on sign-extended MOV. test1",
/* map-lookup preamble */
105 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
106 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
107 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
108 BPF_LD_MAP_FD(BPF_REG_1, 0),
109 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
110 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
111 /* r2 = 0xffff'ffff'ffff'ffff */
112 BPF_MOV64_IMM(BPF_REG_2, 0xffffffff),
113 /* r2 = 0xffff'ffff */
114 BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 32),
115 /* r0 = <oob pointer> */
116 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_2),
117 /* access to OOB pointer */
118 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
120 BPF_MOV64_IMM(BPF_REG_0, 0),
123 .fixup_map_hash_8b = { 3 },
124 .errstr = "map_value pointer and 4294967295",
/* Same as test1 but RSH 36 leaves 0xfff'ffff (28 bits set) — still far
 * outside an 8-byte map value, so the access must be rejected.
 */
128 "bounds check based on sign-extended MOV. test2",
/* map-lookup preamble */
130 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
131 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
132 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
133 BPF_LD_MAP_FD(BPF_REG_1, 0),
134 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
135 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
136 /* r2 = 0xffff'ffff'ffff'ffff */
137 BPF_MOV64_IMM(BPF_REG_2, 0xffffffff),
138 /* r2 = 0xfff'ffff */
139 BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 36),
140 /* r0 = <oob pointer> */
141 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_2),
142 /* access to OOB pointer */
143 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
145 BPF_MOV64_IMM(BPF_REG_0, 0),
148 .fixup_map_hash_8b = { 3 },
149 .errstr = "R0 min value is outside of the array range",
/* The total offset is split across a variable part (r6, masked to 1,
 * then shifted up), a register-constant part, and the LDX insn offset
 * (+3); combined it overflows well past the 8-byte value, and the
 * error string pins the exact computed offset.
 */
153 "bounds check based on reg_off + var_off + insn_off. test1",
/* variable input taken from skb->mark */
155 BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_1,
156 offsetof(struct __sk_buff, mark)),
/* map-lookup preamble */
157 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
158 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
159 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
160 BPF_LD_MAP_FD(BPF_REG_1, 0),
161 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
162 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
/* r6 in [0,1], then shifted to near 2^29 */
163 BPF_ALU64_IMM(BPF_AND, BPF_REG_6, 1),
164 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, (1 << 29) - 1),
165 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
166 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, (1 << 29) - 1),
/* insn off=3 pushes the total past 2^30 */
167 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 3),
168 BPF_MOV64_IMM(BPF_REG_0, 0),
171 .fixup_map_hash_8b = { 4 },
172 .errstr = "value_size=8 off=1073741825",
174 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
/* Same layout as test1 but the variable part is bumped to (1<<30)-1, so
 * the verifier trips on the scalar value itself rather than the final
 * access offset (different errstr).
 */
177 "bounds check based on reg_off + var_off + insn_off. test2",
/* variable input taken from skb->mark */
179 BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_1,
180 offsetof(struct __sk_buff, mark)),
/* map-lookup preamble */
181 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
182 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
183 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
184 BPF_LD_MAP_FD(BPF_REG_1, 0),
185 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
186 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
/* r6 in [0,1], then shifted to near 2^30 (vs 2^29 in test1) */
187 BPF_ALU64_IMM(BPF_AND, BPF_REG_6, 1),
188 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, (1 << 30) - 1),
189 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
190 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, (1 << 29) - 1),
191 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 3),
192 BPF_MOV64_IMM(BPF_REG_0, 0),
195 .fixup_map_hash_8b = { 4 },
196 .errstr = "value 1073741823",
198 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
/* A [0,0xff] range is moved high, shifted near the 32-bit boundary
 * without crossing it, then truncated back by a 32-bit SUB.  Because
 * the range never crosses a 2^32 boundary, the verifier can keep exact
 * bounds and the final offset-0 access is provably safe (no .errstr
 * present in this view).
 */
201 "bounds check after truncation of non-boundary-crossing range",
/* map-lookup preamble */
203 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
204 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
205 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
206 BPF_LD_MAP_FD(BPF_REG_1, 0),
207 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
208 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
209 /* r1 = [0x00, 0xff] */
210 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
211 BPF_MOV64_IMM(BPF_REG_2, 1),
212 /* r2 = 0x10'0000'0000 */
213 BPF_ALU64_IMM(BPF_LSH, BPF_REG_2, 36),
214 /* r1 = [0x10'0000'0000, 0x10'0000'00ff] */
215 BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
216 /* r1 = [0x10'7fff'ffff, 0x10'8000'00fe] */
217 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0x7fffffff),
218 /* r1 = [0x00, 0xff] */
/* 32-bit SUB truncates to the low 32 bits; range stays exact */
219 BPF_ALU32_IMM(BPF_SUB, BPF_REG_1, 0x7fffffff),
/* r1 = 0 after shifting out the 8 significant bits */
221 BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 8),
223 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
224 /* access at offset 0 */
225 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
227 BPF_MOV64_IMM(BPF_REG_0, 0),
230 .fixup_map_hash_8b = { 3 },
/* Here the shifted range DOES cross the 2^32 boundary, so after the
 * 32-bit truncation (ALU32 ADD 0) the value wraps into two disjoint
 * ranges and the verifier must reject the subsequent pointer math.
 */
234 "bounds check after truncation of boundary-crossing range (1)",
/* map-lookup preamble */
236 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
237 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
238 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
239 BPF_LD_MAP_FD(BPF_REG_1, 0),
240 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
241 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
242 /* r1 = [0x00, 0xff] */
243 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
/* two adds of 0x7fffffc0 push the range across the 2^32 line */
244 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0xffffff80 >> 1),
245 /* r1 = [0xffff'ff80, 0x1'0000'007f] */
246 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0xffffff80 >> 1),
247 /* r1 = [0xffff'ff80, 0xffff'ffff] or
248 * [0x0000'0000, 0x0000'007f]
*/
/* ALU32 ADD 0 is a no-op arithmetically but truncates to 32 bits */
250 BPF_ALU32_IMM(BPF_ADD, BPF_REG_1, 0),
251 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 0xffffff80 >> 1),
252 /* r1 = [0x00, 0xff] or
253 * [0xffff'ffff'0000'0080, 0xffff'ffff'ffff'ffff]
*/
255 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 0xffffff80 >> 1),
256 /* error on OOB pointer computation */
257 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
259 BPF_MOV64_IMM(BPF_REG_0, 0),
262 .fixup_map_hash_8b = { 3 },
263 /* not actually fully unbounded, but the bound is very high */
264 .errstr_unpriv = "R1 has unknown scalar with mixed signed bounds, pointer arithmetic with it prohibited for !root",
265 .result_unpriv = REJECT,
266 .errstr = "value -4294967168 makes map_value pointer be out of bounds",
/* Identical scenario to variant (1), except the 32-bit truncation is
 * performed via MOV32 instead of ALU32 ADD 0; expected rejection is
 * the same.
 */
270 "bounds check after truncation of boundary-crossing range (2)",
/* map-lookup preamble */
272 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
273 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
274 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
275 BPF_LD_MAP_FD(BPF_REG_1, 0),
276 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
277 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
278 /* r1 = [0x00, 0xff] */
279 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
280 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0xffffff80 >> 1),
281 /* r1 = [0xffff'ff80, 0x1'0000'007f] */
282 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0xffffff80 >> 1),
283 /* r1 = [0xffff'ff80, 0xffff'ffff] or
284 * [0x0000'0000, 0x0000'007f]
285 * difference to previous test: truncation via MOV32
*/
288 BPF_MOV32_REG(BPF_REG_1, BPF_REG_1),
289 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 0xffffff80 >> 1),
290 /* r1 = [0x00, 0xff] or
291 * [0xffff'ffff'0000'0080, 0xffff'ffff'ffff'ffff]
*/
293 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 0xffffff80 >> 1),
294 /* error on OOB pointer computation */
295 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
297 BPF_MOV64_IMM(BPF_REG_0, 0),
300 .fixup_map_hash_8b = { 3 },
301 /* not actually fully unbounded, but the bound is very high */
302 .errstr_unpriv = "R1 has unknown scalar with mixed signed bounds, pointer arithmetic with it prohibited for !root",
303 .result_unpriv = REJECT,
304 .errstr = "value -4294967168 makes map_value pointer be out of bounds",
/* r1 is driven to 0xffff'fffe, then a 32-bit ADD of 2 wraps it to
 * exactly 0; the verifier must track the wrap so the offset-0 access
 * is accepted (no .errstr present in this view).
 */
308 "bounds check after wrapping 32-bit addition",
/* map-lookup preamble */
310 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
311 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
312 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
313 BPF_LD_MAP_FD(BPF_REG_1, 0),
314 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
315 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 5),
316 /* r1 = 0x7fff'ffff */
317 BPF_MOV64_IMM(BPF_REG_1, 0x7fffffff),
318 /* r1 = 0xffff'fffe */
319 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0x7fffffff),
/* 32-bit add wraps: r1 = 0 */
321 BPF_ALU32_IMM(BPF_ADD, BPF_REG_1, 2),
323 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
324 /* access at offset 0 */
325 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
327 BPF_MOV64_IMM(BPF_REG_0, 0),
330 .fixup_map_hash_8b = { 3 },
/* A 32-bit shift by 32 (>= register width) has unspecified result, so
 * r1 becomes unknown; even after masking to [0, 0xffff] the pointer
 * can exceed the 8-byte value and the access must be rejected.
 */
334 "bounds check after shift with oversized count operand",
/* map-lookup preamble */
336 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
337 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
338 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
339 BPF_LD_MAP_FD(BPF_REG_1, 0),
340 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
341 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
342 BPF_MOV64_IMM(BPF_REG_2, 32),
343 BPF_MOV64_IMM(BPF_REG_1, 1),
344 /* r1 = (u32)1 << (u32)32 = ? */
345 BPF_ALU32_REG(BPF_LSH, BPF_REG_1, BPF_REG_2),
346 /* r1 = [0x0000, 0xffff] */
347 BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 0xffff),
348 /* computes unknown pointer, potentially OOB */
349 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
350 /* potentially OOB access */
351 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
353 BPF_MOV64_IMM(BPF_REG_0, 0),
356 .fixup_map_hash_8b = { 3 },
357 .errstr = "R0 max value is outside of the array range",
/* Subtracting 1 from [0, 0xff] gives a range including -1; a logical
 * right shift of that all-ones value stays enormous, so the resulting
 * pointer is unbounded and the access must be rejected.
 */
361 "bounds check after right shift of maybe-negative number",
/* map-lookup preamble */
363 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
364 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
365 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
366 BPF_LD_MAP_FD(BPF_REG_1, 0),
367 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
368 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
369 /* r1 = [0x00, 0xff] */
370 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
371 /* r1 = [-0x01, 0xfe] */
372 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 1),
373 /* r1 = 0 or 0xff'ffff'ffff'ffff */
374 BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 8),
375 /* r1 = 0 or 0xffff'ffff'ffff */
376 BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 8),
377 /* computes unknown pointer, potentially OOB */
378 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
379 /* potentially OOB access */
380 BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
382 BPF_MOV64_IMM(BPF_REG_0, 0),
385 .fixup_map_hash_8b = { 3 },
386 .errstr = "R0 unbounded memory access",
/* r1 = 2 << 31 = 0x1'0000'0000; a 32-bit RSH sees only the (zero) low
 * half, so r1 becomes 0, and the 32-bit SUB of 2 wraps to 0xffff'fffe.
 * The verifier must model the 32-bit truncation and reject the pointer
 * math with that exact constant.
 */
390 "bounds check after 32-bit right shift with 64-bit input",
/* map-lookup preamble */
392 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
393 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
394 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
395 BPF_LD_MAP_FD(BPF_REG_1, 0),
396 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
397 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
399 BPF_MOV64_IMM(BPF_REG_1, 2),
/* r1 = 0x1'0000'0000 — significant bit is above the 32-bit half */
401 BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 31),
402 /* r1 = 0 (NOT 2!) */
403 BPF_ALU32_IMM(BPF_RSH, BPF_REG_1, 31),
404 /* r1 = 0xffff'fffe (NOT 0!) */
405 BPF_ALU32_IMM(BPF_SUB, BPF_REG_1, 2),
406 /* error on computing OOB pointer */
407 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
409 BPF_MOV64_IMM(BPF_REG_0, 0),
412 .fixup_map_hash_8b = { 3 },
413 .errstr = "math between map_value pointer and 4294967294 is not allowed",
/* A single constant add of 0x7ffffffe: off + 8-byte access size would
 * overflow signed 32 bits, so the pointer arithmetic itself must be
 * refused.
 */
417 "bounds check map access with off+size signed 32bit overflow. test1",
/* map-lookup preamble; JNE skips the exit path on success */
419 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
420 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
421 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
422 BPF_LD_MAP_FD(BPF_REG_1, 0),
423 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
424 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
426 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 0x7ffffffe),
427 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 0),
431 .fixup_map_hash_8b = { 3 },
432 .errstr = "map_value pointer and 2147483646",
/* Three adds of 0x1fffffff accumulate a pointer offset of ~1.5 * 2^29
 * each step; the verifier must reject once the accumulated constant
 * offset grows past its limit.
 */
436 "bounds check map access with off+size signed 32bit overflow. test2",
/* map-lookup preamble; JNE skips the exit path on success */
438 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
439 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
440 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
441 BPF_LD_MAP_FD(BPF_REG_1, 0),
442 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
443 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
/* accumulate the offset in several steps instead of one big add */
445 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 0x1fffffff),
446 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 0x1fffffff),
447 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 0x1fffffff),
448 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 0),
452 .fixup_map_hash_8b = { 3 },
453 .errstr = "pointer offset 1073741822",
454 .errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
/* Mirror of test2 in the negative direction: two SUBs of 0x1fffffff
 * drive the accumulated pointer offset to -1073741822, which must be
 * rejected.
 */
458 "bounds check map access with off+size signed 32bit overflow. test3",
/* map-lookup preamble; JNE skips the exit path on success */
460 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
461 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
462 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
463 BPF_LD_MAP_FD(BPF_REG_1, 0),
464 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
465 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
467 BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 0x1fffffff),
468 BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 0x1fffffff),
469 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 2),
473 .fixup_map_hash_8b = { 3 },
474 .errstr = "pointer offset -1073741822",
475 .errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
/* The offset is built at runtime: 10^6 * 10^6 = 10^12, far beyond any
 * 32-bit offset; the pointer-plus-scalar math must be refused with the
 * exact product in the message.
 */
479 "bounds check map access with off+size signed 32bit overflow. test4",
/* map-lookup preamble; JNE skips the exit path on success */
481 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
482 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
483 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
484 BPF_LD_MAP_FD(BPF_REG_1, 0),
485 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
486 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
/* r1 = 1000000 * 1000000 = 10^12 */
488 BPF_MOV64_IMM(BPF_REG_1, 1000000),
489 BPF_ALU64_IMM(BPF_MUL, BPF_REG_1, 1000000),
490 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
491 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 2),
495 .fixup_map_hash_8b = { 3 },
496 .errstr = "map_value pointer and 1000000000000",
/* r1 = 0xffffFFFF00000001; JMP32 compares only the low 32 bits.  The
 * test checks that an ALU64 ADD preserves the 32-bit sub-register
 * bounds established by the first JMP32, so the second JMP32 guards
 * the (deliberately invalid-looking) load at r0-1.
 */
500 "bounds check mixed 32bit and 64bit arithmetic. test1",
502 BPF_MOV64_IMM(BPF_REG_0, 0),
503 BPF_MOV64_IMM(BPF_REG_1, -1),
/* build 0xffffFFFF00000001 without a 64-bit immediate */
504 BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 32),
505 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 1),
506 /* r1 = 0xffffFFFF00000001 */
507 BPF_JMP32_IMM(BPF_JGT, BPF_REG_1, 1, 3),
508 /* check ALU64 op keeps 32bit bounds */
509 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 1),
510 BPF_JMP32_IMM(BPF_JGT, BPF_REG_1, 2, 1),
512 /* invalid ldx if bounds are lost above */
513 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, -1),
/* Companion to test1: verifies that an ALU32 op zero-extends into the
 * 64-bit bounds, so the full-width JGT against r2 = 3 protects the
 * invalid-looking load at r0-1.
 */
519 "bounds check mixed 32bit and 64bit arithmetic. test2",
521 BPF_MOV64_IMM(BPF_REG_0, 0),
522 BPF_MOV64_IMM(BPF_REG_1, -1),
/* build 0xffffFFFF00000001 without a 64-bit immediate */
523 BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 32),
524 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 1),
525 /* r1 = 0xffffFFFF00000001 */
526 BPF_MOV64_IMM(BPF_REG_2, 3),
/* 32-bit add clears the upper half: r1 = 2 */
528 BPF_ALU32_IMM(BPF_ADD, BPF_REG_1, 1),
529 /* check ALU32 op zero extends 64bit bounds */
530 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 1),
532 /* invalid ldx if bounds are lost above */
533 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, -1),
/* wA (r9) is set to 0 with MOV32 and copied via MOV32 into wB (r2);
 * the verifier must carry the known-zero 32-bit bounds into the 64-bit
 * register so the packet-pointer add and the data_end comparison make
 * the 4-byte load provably safe.
 */
539 "assigning 32bit bounds to 64bit for wA = 0, wB = wA",
/* r8 = skb->data_end, r7 = skb->data */
541 BPF_LDX_MEM(BPF_W, BPF_REG_8, BPF_REG_1,
542 offsetof(struct __sk_buff, data_end)),
543 BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_1,
544 offsetof(struct __sk_buff, data)),
/* wA = 0; wB = wA — both via 32-bit moves */
545 BPF_MOV32_IMM(BPF_REG_9, 0),
546 BPF_MOV32_REG(BPF_REG_2, BPF_REG_9),
/* r6 = data + 0 */
547 BPF_MOV64_REG(BPF_REG_6, BPF_REG_7),
548 BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_2),
/* bounds check: r6 + 8 must not pass data_end */
549 BPF_MOV64_REG(BPF_REG_3, BPF_REG_6),
550 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 8),
551 BPF_JMP_REG(BPF_JGT, BPF_REG_3, BPF_REG_8, 1),
552 BPF_LDX_MEM(BPF_W, BPF_REG_5, BPF_REG_6, 0),
553 BPF_MOV64_IMM(BPF_REG_0, 0),
556 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
558 .flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,