Linux kernel mirror (for testing)
git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel
os
linux
/*
 * Spill -8 (a huge unsigned value) to the stack, reload it into R1, then
 * "bound" it with an unsigned JGE against 2 followed by a signed JSGT
 * against 4.  Mixing the two comparison domains leaves R1 without a
 * provable lower bound before it is added to the map value pointer, so
 * the verifier must reject with "unbounded min value".
 */
1{
2 "bounds checks mixing signed and unsigned, positive bounds",
3 .insns = {
4 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
5 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
6 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
7 BPF_LD_MAP_FD(BPF_REG_1, 0),
8 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
9 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
/* Write -8 to a stack slot and read it back as the "index" in R1. */
10 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
11 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
12 BPF_MOV64_IMM(BPF_REG_2, 2),
/* Unsigned check (2 >= R1), then signed check (R1 > 4). */
13 BPF_JMP_REG(BPF_JGE, BPF_REG_2, BPF_REG_1, 3),
14 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 4, 2),
/* Pointer arithmetic on the map value with the unbounded R1. */
15 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
16 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
17 BPF_MOV64_IMM(BPF_REG_0, 0),
18 BPF_EXIT_INSN(),
19 },
20 .fixup_map_hash_8b = { 3 },
21 .errstr = "unbounded min value",
22 .result = REJECT,
23},
/*
 * Base case: unsigned JGT of R1 against -1 (i.e. UINT64_MAX), then a
 * signed JSGT against 1.  The unsigned compare against -1 contributes
 * nothing usable, so R1's minimum stays unbounded when it is added to
 * the map value pointer; expected REJECT with "unbounded min value".
 */
24{
25 "bounds checks mixing signed and unsigned",
26 .insns = {
27 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
28 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
29 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
30 BPF_LD_MAP_FD(BPF_REG_1, 0),
31 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
32 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
33 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
34 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
/* -1 as an unsigned bound is UINT64_MAX: the JGT is useless. */
35 BPF_MOV64_IMM(BPF_REG_2, -1),
36 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 3),
37 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
38 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
39 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
40 BPF_MOV64_IMM(BPF_REG_0, 0),
41 BPF_EXIT_INSN(),
42 },
43 .fixup_map_hash_8b = { 3 },
44 .errstr = "unbounded min value",
45 .result = REJECT,
46},
/*
 * Variant 2: same broken bound pattern as the base case, but the value
 * is first laundered through R8 (R8 = 0; R8 += R1) before the signed
 * check and the pointer arithmetic, to make sure the verifier tracks
 * the unbounded range across the ALU copy.  Expected REJECT with
 * "unbounded min value".
 */
47{
48 "bounds checks mixing signed and unsigned, variant 2",
49 .insns = {
50 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
51 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
52 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
53 BPF_LD_MAP_FD(BPF_REG_1, 0),
54 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
55 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
56 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
57 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
58 BPF_MOV64_IMM(BPF_REG_2, -1),
59 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 5),
/* Copy R1's (unbounded) range into R8 via 0 + R1. */
60 BPF_MOV64_IMM(BPF_REG_8, 0),
61 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_1),
62 BPF_JMP_IMM(BPF_JSGT, BPF_REG_8, 1, 2),
63 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_8),
64 BPF_ST_MEM(BPF_B, BPF_REG_8, 0, 0),
65 BPF_MOV64_IMM(BPF_REG_0, 0),
66 BPF_EXIT_INSN(),
67 },
68 .fixup_map_hash_8b = { 3 },
69 .errstr = "unbounded min value",
70 .result = REJECT,
71},
/*
 * Variant 3: like variant 2, but the unbounded value is propagated with
 * a plain register move (R8 = R1) instead of an ALU add, exercising
 * range propagation through MOV.  Expected REJECT with
 * "unbounded min value".
 */
72{
73 "bounds checks mixing signed and unsigned, variant 3",
74 .insns = {
75 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
76 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
77 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
78 BPF_LD_MAP_FD(BPF_REG_1, 0),
79 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
80 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
81 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
82 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
83 BPF_MOV64_IMM(BPF_REG_2, -1),
84 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 4),
/* MOV carries the unbounded range into R8. */
85 BPF_MOV64_REG(BPF_REG_8, BPF_REG_1),
86 BPF_JMP_IMM(BPF_JSGT, BPF_REG_8, 1, 2),
87 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_8),
88 BPF_ST_MEM(BPF_B, BPF_REG_8, 0, 0),
89 BPF_MOV64_IMM(BPF_REG_0, 0),
90 BPF_EXIT_INSN(),
91 },
92 .fixup_map_hash_8b = { 3 },
93 .errstr = "unbounded min value",
94 .result = REJECT,
95},
/*
 * Variant 4: positive control.  R1 is masked with AND 1, which pins its
 * range to [0, 1] regardless of the spilled -8, so the subsequent
 * signed check and pointer arithmetic are provably safe.  Expected
 * ACCEPT.
 */
96{
97 "bounds checks mixing signed and unsigned, variant 4",
98 .insns = {
99 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
100 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
101 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
102 BPF_LD_MAP_FD(BPF_REG_1, 0),
103 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
104 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
105 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
106 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
/* AND with 1 bounds R1 to [0, 1] — this is what makes it safe. */
107 BPF_MOV64_IMM(BPF_REG_2, 1),
108 BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
109 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
110 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
111 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
112 BPF_MOV64_IMM(BPF_REG_0, 0),
113 BPF_EXIT_INSN(),
114 },
115 .fixup_map_hash_8b = { 3 },
116 .result = ACCEPT,
117},
/*
 * Variant 5: after the same useless unsigned-vs--1 check and a signed
 * JSGT, the map value pointer is adjusted with ADD 4 followed by
 * SUB R1 — subtraction of a value with no proven bounds.  Expected
 * REJECT with "unbounded min value".
 */
118{
119 "bounds checks mixing signed and unsigned, variant 5",
120 .insns = {
121 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
122 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
123 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
124 BPF_LD_MAP_FD(BPF_REG_1, 0),
125 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
126 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
127 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
128 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
129 BPF_MOV64_IMM(BPF_REG_2, -1),
130 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 5),
131 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 4),
/* Subtract the unbounded R1 from the map value pointer. */
132 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 4),
133 BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
134 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
135 BPF_MOV64_IMM(BPF_REG_0, 0),
136 BPF_EXIT_INSN(),
137 },
138 .fixup_map_hash_8b = { 3 },
139 .errstr = "unbounded min value",
140 .result = REJECT,
141},
/*
 * Variant 6: instead of map-value pointer arithmetic, the badly bounded
 * value ends up in R4 as the length argument to bpf_skb_load_bytes().
 * The mixed unsigned/signed checks leave R4 possibly negative, so the
 * helper call must be rejected ("R4 min value is negative, either use
 * unsigned").  No map fixup is needed for this one.
 */
142{
143 "bounds checks mixing signed and unsigned, variant 6",
144 .insns = {
145 BPF_MOV64_IMM(BPF_REG_2, 0),
146 BPF_MOV64_REG(BPF_REG_3, BPF_REG_10),
147 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, -512),
148 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
149 BPF_LDX_MEM(BPF_DW, BPF_REG_4, BPF_REG_10, -16),
150 BPF_MOV64_IMM(BPF_REG_6, -1),
151 BPF_JMP_REG(BPF_JGT, BPF_REG_4, BPF_REG_6, 5),
152 BPF_JMP_IMM(BPF_JSGT, BPF_REG_4, 1, 4),
153 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 1),
154 BPF_MOV64_IMM(BPF_REG_5, 0),
155 BPF_ST_MEM(BPF_H, BPF_REG_10, -512, 0),
/* R4 (length) may be negative here — the helper call must fail. */
156 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_skb_load_bytes),
157 BPF_MOV64_IMM(BPF_REG_0, 0),
158 BPF_EXIT_INSN(),
159 },
160 .errstr = "R4 min value is negative, either use unsigned",
161 .result = REJECT,
162},
/*
 * Variant 7: positive control.  The unsigned JGT bound is 1GiB — a
 * positive value that fits in the signed range — so combining it with
 * the signed JSGT yields a sound bound on R1 and the access is safe.
 * Expected ACCEPT.
 */
163{
164 "bounds checks mixing signed and unsigned, variant 7",
165 .insns = {
166 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
167 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
168 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
169 BPF_LD_MAP_FD(BPF_REG_1, 0),
170 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
171 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
172 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
173 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
/* Positive bound (1GiB) — unsigned and signed views agree. */
174 BPF_MOV64_IMM(BPF_REG_2, 1024 * 1024 * 1024),
175 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 3),
176 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
177 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
178 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
179 BPF_MOV64_IMM(BPF_REG_0, 0),
180 BPF_EXIT_INSN(),
181 },
182 .fixup_map_hash_8b = { 3 },
183 .result = ACCEPT,
184},
/*
 * Variant 8: the operands of the unsigned compare are swapped
 * (JGT R2(-1), R1 taken to an early exit) so the fall-through path is
 * the one where R1 remains effectively unbounded; the later signed
 * JSGT still cannot save it.  Expected REJECT with
 * "unbounded min value".
 */
185{
186 "bounds checks mixing signed and unsigned, variant 8",
187 .insns = {
188 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
189 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
190 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
191 BPF_LD_MAP_FD(BPF_REG_1, 0),
192 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
193 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
194 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
195 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
196 BPF_MOV64_IMM(BPF_REG_2, -1),
/* Taken branch exits early; fall-through keeps R1 unbounded. */
197 BPF_JMP_REG(BPF_JGT, BPF_REG_2, BPF_REG_1, 2),
198 BPF_MOV64_IMM(BPF_REG_0, 0),
199 BPF_EXIT_INSN(),
200 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
201 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
202 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
203 BPF_MOV64_IMM(BPF_REG_0, 0),
204 BPF_EXIT_INSN(),
205 },
206 .fixup_map_hash_8b = { 3 },
207 .errstr = "unbounded min value",
208 .result = REJECT,
209},
/*
 * Variant 9: positive control for the variant-8 shape.  The comparison
 * constant is loaded via LD_IMM64 as INT64_MIN (0x8000000000000000);
 * with this bound the combination of the unsigned JGT and the signed
 * JSGT is sound, so the program is safe.  Expected ACCEPT.
 */
210{
211 "bounds checks mixing signed and unsigned, variant 9",
212 .insns = {
213 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
214 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
215 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
216 BPF_LD_MAP_FD(BPF_REG_1, 0),
217 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
218 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 10),
219 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
220 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
/* 64-bit immediate: INT64_MIN as the comparison bound. */
221 BPF_LD_IMM64(BPF_REG_2, -9223372036854775808ULL),
222 BPF_JMP_REG(BPF_JGT, BPF_REG_2, BPF_REG_1, 2),
223 BPF_MOV64_IMM(BPF_REG_0, 0),
224 BPF_EXIT_INSN(),
225 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
226 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
227 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
228 BPF_MOV64_IMM(BPF_REG_0, 0),
229 BPF_EXIT_INSN(),
230 },
231 .fixup_map_hash_8b = { 3 },
232 .result = ACCEPT,
233},
/*
 * Variant 10: same shape as variant 9 but the unsigned bound is 0, so
 * the early-exit branch (0 > R1 unsigned) is never useful and the
 * fall-through R1 keeps an unbounded signed minimum.  Expected REJECT
 * with "unbounded min value".
 */
234{
235 "bounds checks mixing signed and unsigned, variant 10",
236 .insns = {
237 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
238 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
239 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
240 BPF_LD_MAP_FD(BPF_REG_1, 0),
241 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
242 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
243 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
244 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
245 BPF_MOV64_IMM(BPF_REG_2, 0),
246 BPF_JMP_REG(BPF_JGT, BPF_REG_2, BPF_REG_1, 2),
247 BPF_MOV64_IMM(BPF_REG_0, 0),
248 BPF_EXIT_INSN(),
249 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
250 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
251 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
252 BPF_MOV64_IMM(BPF_REG_0, 0),
253 BPF_EXIT_INSN(),
254 },
255 .fixup_map_hash_8b = { 3 },
256 .errstr = "unbounded min value",
257 .result = REJECT,
258},
/*
 * Variant 11: JGE against -1 (UINT64_MAX): the taken branch is dead
 * code only when R1 == UINT64_MAX... the early-exit path is marked as
 * a dead branch in the test itself.  The surviving path still has no
 * signed lower bound on R1.  Expected REJECT with
 * "unbounded min value".
 */
259{
260 "bounds checks mixing signed and unsigned, variant 11",
261 .insns = {
262 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
263 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
264 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
265 BPF_LD_MAP_FD(BPF_REG_1, 0),
266 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
267 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
268 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
269 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
270 BPF_MOV64_IMM(BPF_REG_2, -1),
271 BPF_JMP_REG(BPF_JGE, BPF_REG_2, BPF_REG_1, 2),
272 /* Dead branch. */
273 BPF_MOV64_IMM(BPF_REG_0, 0),
274 BPF_EXIT_INSN(),
275 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
276 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
277 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
278 BPF_MOV64_IMM(BPF_REG_0, 0),
279 BPF_EXIT_INSN(),
280 },
281 .fixup_map_hash_8b = { 3 },
282 .errstr = "unbounded min value",
283 .result = REJECT,
284},
/*
 * Variant 12: like variant 11 but the unsigned bound is -6 (a huge
 * unsigned value just below UINT64_MAX).  The early exit on
 * -6 >= R1 (unsigned) still leaves the fall-through R1 with an
 * unbounded signed minimum.  Expected REJECT with
 * "unbounded min value".
 */
285{
286 "bounds checks mixing signed and unsigned, variant 12",
287 .insns = {
288 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
289 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
290 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
291 BPF_LD_MAP_FD(BPF_REG_1, 0),
292 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
293 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 9),
294 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
295 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
296 BPF_MOV64_IMM(BPF_REG_2, -6),
297 BPF_JMP_REG(BPF_JGE, BPF_REG_2, BPF_REG_1, 2),
298 BPF_MOV64_IMM(BPF_REG_0, 0),
299 BPF_EXIT_INSN(),
300 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
301 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
302 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
303 BPF_MOV64_IMM(BPF_REG_0, 0),
304 BPF_EXIT_INSN(),
305 },
306 .fixup_map_hash_8b = { 3 },
307 .errstr = "unbounded min value",
308 .result = REJECT,
309},
/*
 * Variant 13: the unsigned check (2 >= R1) gates the path, then the
 * suspect value is folded into R7 (initialized to 1, checked with
 * signed JSGT, then R7 += R1).  The sum can still have an unbounded
 * signed minimum when added to the map value pointer.  Expected
 * REJECT with "unbounded min value".
 */
310{
311 "bounds checks mixing signed and unsigned, variant 13",
312 .insns = {
313 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
314 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
315 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
316 BPF_LD_MAP_FD(BPF_REG_1, 0),
317 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
318 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
319 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
320 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
321 BPF_MOV64_IMM(BPF_REG_2, 2),
322 BPF_JMP_REG(BPF_JGE, BPF_REG_2, BPF_REG_1, 2),
323 BPF_MOV64_IMM(BPF_REG_7, 1),
324 BPF_JMP_IMM(BPF_JSGT, BPF_REG_7, 0, 2),
325 BPF_MOV64_IMM(BPF_REG_0, 0),
326 BPF_EXIT_INSN(),
/* Mix the suspect R1 into R7 before the final signed check. */
327 BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_1),
328 BPF_JMP_IMM(BPF_JSGT, BPF_REG_7, 4, 2),
329 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_7),
330 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
331 BPF_MOV64_IMM(BPF_REG_0, 0),
332 BPF_EXIT_INSN(),
333 },
334 .fixup_map_hash_8b = { 3 },
335 .errstr = "unbounded min value",
336 .result = REJECT,
337},
/*
 * Variant 14: the bounds checks are reached via backward jumps
 * (negative offsets at the end of the program) conditioned on
 * skb->mark, forcing the verifier to analyze the mixed signed/unsigned
 * checks along a looping control flow.  Note the map fixup index is 4
 * here because of the extra LDX at the start.  Expected REJECT with
 * "unbounded min value".
 */
338{
339 "bounds checks mixing signed and unsigned, variant 14",
340 .insns = {
341 BPF_LDX_MEM(BPF_W, BPF_REG_9, BPF_REG_1,
342 offsetof(struct __sk_buff, mark)),
343 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
344 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
345 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
346 BPF_LD_MAP_FD(BPF_REG_1, 0),
347 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
348 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
349 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
350 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
351 BPF_MOV64_IMM(BPF_REG_2, -1),
352 BPF_MOV64_IMM(BPF_REG_8, 2),
353 BPF_JMP_IMM(BPF_JEQ, BPF_REG_9, 42, 6),
354 BPF_JMP_REG(BPF_JSGT, BPF_REG_8, BPF_REG_1, 3),
355 BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, 1, 2),
356 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
357 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
358 BPF_MOV64_IMM(BPF_REG_0, 0),
359 BPF_EXIT_INSN(),
/* Backward jumps: re-enter the check sequence from below. */
360 BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, -3),
361 BPF_JMP_IMM(BPF_JA, 0, 0, -7),
362 },
363 .fixup_map_hash_8b = { 4 },
364 .errstr = "unbounded min value",
365 .result = REJECT,
366},
/*
 * Variant 15: the unbounded R1 is added to the map value pointer R0
 * FIRST, and only afterwards is the resulting pointer range-checked
 * with an unsigned JGT.  Checking after the pointer arithmetic cannot
 * make the access safe.  Expected REJECT with "unbounded min value".
 */
367{
368 "bounds checks mixing signed and unsigned, variant 15",
369 .insns = {
370 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
371 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
372 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
373 BPF_LD_MAP_FD(BPF_REG_1, 0),
374 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
375 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
376 BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, -8),
377 BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -16),
378 BPF_MOV64_IMM(BPF_REG_2, -6),
379 BPF_JMP_REG(BPF_JGE, BPF_REG_2, BPF_REG_1, 2),
380 BPF_MOV64_IMM(BPF_REG_0, 0),
381 BPF_EXIT_INSN(),
/* Pointer arithmetic happens before the bounds check below. */
382 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
383 BPF_JMP_IMM(BPF_JGT, BPF_REG_0, 1, 2),
384 BPF_MOV64_IMM(BPF_REG_0, 0),
385 BPF_EXIT_INSN(),
386 BPF_ST_MEM(BPF_B, BPF_REG_0, 0, 0),
387 BPF_MOV64_IMM(BPF_REG_0, 0),
388 BPF_EXIT_INSN(),
389 },
390 .fixup_map_hash_8b = { 3 },
391 .errstr = "unbounded min value",
392 .result = REJECT,
393},