| /* |
| * Testsuite for BPF interpreter and BPF JIT compiler |
| * |
| * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com |
| * |
| * This program is free software; you can redistribute it and/or |
| * modify it under the terms of version 2 of the GNU General Public |
| * License as published by the Free Software Foundation. |
| * |
| * This program is distributed in the hope that it will be useful, but |
| * WITHOUT ANY WARRANTY; without even the implied warranty of |
| * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| * General Public License for more details. |
| */ |
| |
| #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
| |
| #include <linux/init.h> |
| #include <linux/module.h> |
| #include <linux/filter.h> |
| #include <linux/bpf.h> |
| #include <linux/skbuff.h> |
| #include <linux/netdevice.h> |
| #include <linux/if_vlan.h> |
| #include <linux/random.h> |
| #include <linux/highmem.h> |
| |
/* General test specific settings */
#define MAX_SUBTESTS	3	/* data/expected-result pairs per test case */
#define MAX_TESTRUNS	10000	/* NOTE(review): not referenced in this chunk; presumably run-count for timing */
#define MAX_DATA	128	/* size of a test case's inline skb payload */
#define MAX_INSNS	512	/* max statically declared instructions per test */
#define MAX_K		0xffffFFFF	/* largest 32-bit classic BPF constant */

/* Few constants used to init test 'skb'; the LD_* ancillary tests below
 * assert that the corresponding skb fields read back these exact values.
 */
#define SKB_TYPE	3
#define SKB_MARK	0x1234aaaa
#define SKB_HASH	0x1234aaab
#define SKB_QUEUE_MAP	123
#define SKB_VLAN_TCI	0xffff
#define SKB_DEV_IFINDEX	577
#define SKB_DEV_TYPE	588

/* Redefine REGs to make tests less verbose */
#define R0		BPF_REG_0
#define R1		BPF_REG_1
#define R2		BPF_REG_2
#define R3		BPF_REG_3
#define R4		BPF_REG_4
#define R5		BPF_REG_5
#define R6		BPF_REG_6
#define R7		BPF_REG_7
#define R8		BPF_REG_8
#define R9		BPF_REG_9
#define R10		BPF_REG_10

/* Flags that can be passed to test cases (OR'ed into bpf_test::aux) */
#define FLAG_NO_DATA		BIT(0)	/* run without skb payload */
#define FLAG_EXPECTED_FAIL	BIT(1)	/* presumably: filter load is expected to be rejected */
#define FLAG_SKB_FRAG		BIT(2)	/* place frag_data in a paged skb fragment */

/* Program flavour; also stored in bpf_test::aux, in the high bits so it
 * does not collide with the FLAG_* bits above.
 */
enum {
	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
	INTERNAL = BIT(7),	/* Extended instruction set. */
};

#define TEST_TYPE_MASK		(CLASSIC | INTERNAL)
| |
/* One BPF test case: a program (statically declared classic or eBPF
 * instructions, or a buffer built at runtime by fill_helper), the input
 * packet data, and up to MAX_SUBTESTS (data length, expected result) pairs.
 */
struct bpf_test {
	const char *descr;	/* human-readable test name */
	union {
		/* Classic BPF program, used with CLASSIC in aux. */
		struct sock_filter insns[MAX_INSNS];
		/* eBPF program, used with INTERNAL in aux. */
		struct bpf_insn insns_int[MAX_INSNS];
		/* Runtime-built program; filled in by fill_helper(). */
		struct {
			void *insns;
			unsigned int len;
		} ptr;
	} u;
	__u8 aux;		/* CLASSIC/INTERNAL plus FLAG_* bits */
	__u8 data[MAX_DATA];	/* skb payload handed to the program */
	struct {
		int data_size;	/* payload length for this subtest */
		__u32 result;	/* expected program return value */
	} test[MAX_SUBTESTS];
	/* Optional generator for large programs (stored via u.ptr). */
	int (*fill_helper)(struct bpf_test *self);
	__u8 frag_data[MAX_DATA];	/* paged-frag content when FLAG_SKB_FRAG is set */
};
| |
| /* Large test cases need separate allocation and fill handler. */ |
| |
| static int bpf_fill_maxinsns1(struct bpf_test *self) |
| { |
| unsigned int len = BPF_MAXINSNS; |
| struct sock_filter *insn; |
| __u32 k = ~0; |
| int i; |
| |
| insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL); |
| if (!insn) |
| return -ENOMEM; |
| |
| for (i = 0; i < len; i++, k--) |
| insn[i] = __BPF_STMT(BPF_RET | BPF_K, k); |
| |
| self->u.ptr.insns = insn; |
| self->u.ptr.len = len; |
| |
| return 0; |
| } |
| |
| static int bpf_fill_maxinsns2(struct bpf_test *self) |
| { |
| unsigned int len = BPF_MAXINSNS; |
| struct sock_filter *insn; |
| int i; |
| |
| insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL); |
| if (!insn) |
| return -ENOMEM; |
| |
| for (i = 0; i < len; i++) |
| insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe); |
| |
| self->u.ptr.insns = insn; |
| self->u.ptr.len = len; |
| |
| return 0; |
| } |
| |
/* Build a maximum-size program: a long chain of ALU adds with
 * pseudo-random immediates, terminated by a single "return A".
 * The PRNG is seeded with a fixed value so the accumulated result
 * is deterministic across runs.
 */
static int bpf_fill_maxinsns3(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	struct rnd_state rnd;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Fixed seed: the test must be reproducible. */
	prandom_seed_state(&rnd, 3141592653589793238ULL);

	for (i = 0; i < len - 1; i++) {
		__u32 k = prandom_u32_state(&rnd);

		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
| static int bpf_fill_maxinsns4(struct bpf_test *self) |
| { |
| unsigned int len = BPF_MAXINSNS + 1; |
| struct sock_filter *insn; |
| int i; |
| |
| insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL); |
| if (!insn) |
| return -ENOMEM; |
| |
| for (i = 0; i < len; i++) |
| insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe); |
| |
| self->u.ptr.insns = insn; |
| self->u.ptr.len = len; |
| |
| return 0; |
| } |
| |
| static int bpf_fill_maxinsns5(struct bpf_test *self) |
| { |
| unsigned int len = BPF_MAXINSNS; |
| struct sock_filter *insn; |
| int i; |
| |
| insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL); |
| if (!insn) |
| return -ENOMEM; |
| |
| insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0); |
| |
| for (i = 1; i < len - 1; i++) |
| insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe); |
| |
| insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab); |
| |
| self->u.ptr.insns = insn; |
| self->u.ptr.len = len; |
| |
| return 0; |
| } |
| |
/* Build a maximum-size program of repeated loads of the
 * VLAN_TAG_PRESENT ancillary field, ending in "return A"; exercises
 * repeated expansion of the same ancillary load.
 */
static int bpf_fill_maxinsns6(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_VLAN_TAG_PRESENT);

	/* A holds the result of the last (identical) load. */
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
/* Build a maximum-size program of repeated SKF_AD_CPU ancillary loads,
 * then read the CPU number twice (via A and X) and return their
 * difference — expected to be 0 when both reads observe the same CPU.
 */
static int bpf_fill_maxinsns7(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 4; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_CPU);

	/* X = cpu; A = cpu; return A - X */
	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				   SKF_AD_CPU);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
/* Build a maximum-size program of JGT comparisons against 0xffffffff.
 * A is preloaded with 0xffffffff, so "A > 0xffffffff" is always false
 * and execution falls straight through every jump; the never-taken
 * true-branch offsets (jmp_off, shrinking by one per instruction) all
 * target the final "return A", stressing large jump-offset handling.
 */
static int bpf_fill_maxinsns8(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i, jmp_off = len - 3;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
/* Build a maximum-size eBPF program exercising long forward AND
 * backward jumps: insn[0] jumps forward to insn[len - 1], which jumps
 * backward to insn[1], which sets R0 = 0xcbababab and exits. The
 * filler moves and the exit at insn[len - 2] are never executed.
 */
static int bpf_fill_maxinsns9(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
	insn[2] = BPF_EXIT_INSN();

	/* Dead filler between the two jump targets. */
	for (i = 3; i < len - 2; i++)
		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);

	insn[len - 2] = BPF_EXIT_INSN();
	/* Maximum-distance backward jump, back to insn[1]. */
	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
/* Build a maximum-size eBPF program consisting of a jump chain that
 * ping-pongs between the first half (forward jumps) and the second
 * half (backward jumps) of the body, visiting every slot once before
 * the middle instruction finally jumps to the two-instruction tail,
 * which sets R0 = 0xabababac and exits.
 */
static int bpf_fill_maxinsns10(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* First half: shrinking forward jumps into the second half. */
	for (i = 0; i < hlen / 2; i++)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
	/* Second half: backward jumps into the first half. */
	for (i = hlen - 1; i > hlen / 2; i--)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);

	/* Middle instruction ends the chain by jumping to the tail. */
	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
	insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
	insn[hlen + 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
/* Build a @len-instruction program made of "pages" of @plen chained
 * unconditional jumps (each page's jumps cascade to the start of the
 * next page), a shorter tail run for the remainder, and a final
 * "return 0xababcbac".
 *
 * NOTE(review): rlen is unsigned and computed as (len % plen) - 1, so
 * it wraps to a huge value if len is an exact multiple of plen, making
 * the tail loop write far out of bounds. Both current callers pass
 * len % plen != 0 — confirm before adding new callers.
 */
static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
			 unsigned int plen)
{
	struct sock_filter *insn;
	unsigned int rlen;
	int i, j;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Remaining tail instructions, minus one slot for the return. */
	rlen = (len % plen) - 1;

	for (i = 0; i + plen < len; i += plen)
		for (j = 0; j < plen; j++)
			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
						 plen - 1 - j, 0, 0);
	/* Shorter jump cascade for the leftover tail (i retains its
	 * value from the loop above).
	 */
	for (j = 0; j < rlen; j++)
		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
					 0, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
| static int bpf_fill_maxinsns11(struct bpf_test *self) |
| { |
| /* Hits 70 passes on x86_64, so cannot get JITed there. */ |
| return __bpf_fill_ja(self, BPF_MAXINSNS, 68); |
| } |
| |
static int bpf_fill_ja(struct bpf_test *self)
{
	/* Sized so the x86_64 JIT converges in exactly 11 passes. */
	const unsigned int prog_len = 12;
	const unsigned int page = 9;

	return __bpf_fill_ja(self, prog_len, page);
}
| |
| static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self) |
| { |
| unsigned int len = BPF_MAXINSNS; |
| struct sock_filter *insn; |
| int i; |
| |
| insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL); |
| if (!insn) |
| return -ENOMEM; |
| |
| for (i = 0; i < len - 1; i += 2) { |
| insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0); |
| insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_CPU); |
| } |
| |
| insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee); |
| |
| self->u.ptr.insns = insn; |
| self->u.ptr.len = len; |
| |
| return 0; |
| } |
| |
#define PUSH_CNT 68
/* test: {skb->data[0], vlan_push} x 68 + {skb->data[0], vlan_pop} x 68 */
static int bpf_fill_ld_abs_vlan_push_pop(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i = 0, j, k = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	/* Park the ctx (skb) pointer in R6; R1-R5 get clobbered below. */
	insn[i++] = BPF_MOV64_REG(R6, R1);
loop:
	for (j = 0; j < PUSH_CNT; j++) {
		insn[i++] = BPF_LD_ABS(BPF_B, 0);
		/* If the first payload byte isn't 0x34, jump to the final
		 * exit at insn[len - 1] (target pc = i + 1 + off).
		 */
		insn[i] = BPF_JMP_IMM(BPF_JNE, R0, 0x34, len - i - 2);
		i++;
		insn[i++] = BPF_MOV64_REG(R1, R6);
		insn[i++] = BPF_MOV64_IMM(R2, 1);
		insn[i++] = BPF_MOV64_IMM(R3, 2);
		/* Call bpf_skb_vlan_push(skb, 1, 2) — args presumably
		 * proto/tci; verify against the helper's prototype.
		 */
		insn[i++] = BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
					 bpf_skb_vlan_push_proto.func - __bpf_call_base),
		insn[i] = BPF_JMP_IMM(BPF_JNE, R0, 0, len - i - 2);
		i++;
	}

	for (j = 0; j < PUSH_CNT; j++) {
		insn[i++] = BPF_LD_ABS(BPF_B, 0);
		insn[i] = BPF_JMP_IMM(BPF_JNE, R0, 0x34, len - i - 2);
		i++;
		insn[i++] = BPF_MOV64_REG(R1, R6);
		/* Undo one push: bpf_skb_vlan_pop(skb); bail out on error. */
		insn[i++] = BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
					 bpf_skb_vlan_pop_proto.func - __bpf_call_base),
		insn[i] = BPF_JMP_IMM(BPF_JNE, R0, 0, len - i - 2);
		i++;
	}
	/* Emit the whole push+pop sequence five times over. */
	if (++k < 5)
		goto loop;

	/* Pad the remainder; on full success R0 ends up as 0xbef. */
	for (; i < len - 1; i++)
		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xbef);

	insn[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}
| |
| static struct bpf_test tests[] = { |
| { |
| "TAX", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 1), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */ |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_LEN, 0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */ |
| BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 10, 20, 30, 40, 50 }, |
| { { 2, 10 }, { 3, 20 }, { 4, 30 } }, |
| }, |
| { |
| "TXA", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_MISC | BPF_TXA, 0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */ |
| }, |
| CLASSIC, |
| { 10, 20, 30, 40, 50 }, |
| { { 1, 2 }, { 3, 6 }, { 4, 8 } }, |
| }, |
| { |
| "ADD_SUB_MUL_K", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 1), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2), |
| BPF_STMT(BPF_LDX | BPF_IMM, 3), |
| BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff), |
| BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC | FLAG_NO_DATA, |
| { }, |
| { { 0, 0xfffffffd } } |
| }, |
| { |
| "DIV_MOD_KX", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 8), |
| BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff), |
| BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff), |
| BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff), |
| BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff), |
| BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC | FLAG_NO_DATA, |
| { }, |
| { { 0, 0x20000000 } } |
| }, |
| { |
| "AND_OR_LSH_K", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 0xff), |
| BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0), |
| BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 0xf), |
| BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC | FLAG_NO_DATA, |
| { }, |
| { { 0, 0x800000ff }, { 1, 0x800000ff } }, |
| }, |
| { |
| "LD_IMM_0", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */ |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 1 } }, |
| }, |
| { |
| "LD_IND", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K), |
| BPF_STMT(BPF_RET | BPF_K, 1) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 0 }, { 10, 0 }, { 60, 0 } }, |
| }, |
| { |
| "LD_ABS", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000), |
| BPF_STMT(BPF_RET | BPF_K, 1) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 0 }, { 10, 0 }, { 60, 0 } }, |
| }, |
| { |
| "LD_ABS_LL", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 1, 2, 3 }, |
| { { 1, 0 }, { 2, 3 } }, |
| }, |
| { |
| "LD_IND_LL", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1), |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 1, 2, 3, 0xff }, |
| { { 1, 1 }, { 3, 3 }, { 4, 0xff } }, |
| }, |
| { |
| "LD_ABS_NET", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 }, |
| { { 15, 0 }, { 16, 3 } }, |
| }, |
| { |
| "LD_IND_NET", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15), |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 }, |
| { { 14, 0 }, { 15, 1 }, { 17, 3 } }, |
| }, |
| { |
| "LD_PKTTYPE", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PKTTYPE), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PKTTYPE), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PKTTYPE), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 3 }, { 10, 3 } }, |
| }, |
| { |
| "LD_MARK", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_MARK), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, SKB_MARK}, { 10, SKB_MARK} }, |
| }, |
| { |
| "LD_RXHASH", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_RXHASH), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, SKB_HASH}, { 10, SKB_HASH} }, |
| }, |
| { |
| "LD_QUEUE", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_QUEUE), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } }, |
| }, |
| { |
| "LD_PROTOCOL", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PROTOCOL), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0), |
| BPF_STMT(BPF_MISC | BPF_TXA, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { 10, 20, 30 }, |
| { { 10, ETH_P_IP }, { 100, ETH_P_IP } }, |
| }, |
| { |
| "LD_VLAN_TAG", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_VLAN_TAG), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { |
| { 1, SKB_VLAN_TCI & ~VLAN_TAG_PRESENT }, |
| { 10, SKB_VLAN_TCI & ~VLAN_TAG_PRESENT } |
| }, |
| }, |
| { |
| "LD_VLAN_TAG_PRESENT", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { |
| { 1, !!(SKB_VLAN_TCI & VLAN_TAG_PRESENT) }, |
| { 10, !!(SKB_VLAN_TCI & VLAN_TAG_PRESENT) } |
| }, |
| }, |
| { |
| "LD_IFINDEX", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_IFINDEX), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } }, |
| }, |
| { |
| "LD_HATYPE", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_HATYPE), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } }, |
| }, |
| { |
| "LD_CPU", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_CPU), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_CPU), |
| BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 0 }, { 10, 0 } }, |
| }, |
| { |
| "LD_NLATTR", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_IMM, 2), |
| BPF_STMT(BPF_MISC | BPF_TXA, 0), |
| BPF_STMT(BPF_LDX | BPF_IMM, 3), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| #ifdef __BIG_ENDIAN |
| { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 }, |
| #else |
| { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 }, |
| #endif |
| { { 4, 0 }, { 20, 6 } }, |
| }, |
| { |
| "LD_NLATTR_NEST", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LDX | BPF_IMM, 3), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_NLATTR_NEST), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| #ifdef __BIG_ENDIAN |
| { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 }, |
| #else |
| { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 }, |
| #endif |
| { { 4, 0 }, { 20, 10 } }, |
| }, |
| { |
| "LD_PAYLOAD_OFF", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PAY_OFFSET), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PAY_OFFSET), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PAY_OFFSET), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PAY_OFFSET), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_PAY_OFFSET), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800), |
| * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request, |
| * id 9737, seq 1, length 64 |
| */ |
| { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, |
| 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, |
| 0x08, 0x00, |
| 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40, |
| 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 }, |
| { { 30, 0 }, { 100, 42 } }, |
| }, |
| { |
| "LD_ANC_XOR", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 10), |
| BPF_STMT(BPF_LDX | BPF_IMM, 300), |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_ALU_XOR_X), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 4, 10 ^ 300 }, { 20, 10 ^ 300 } }, |
| }, |
| { |
| "SPILL_FILL", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_LD | BPF_IMM, 2), |
| BPF_STMT(BPF_ALU | BPF_RSH, 1), |
| BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0), |
| BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */ |
| BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000), |
| BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */ |
| BPF_STMT(BPF_STX, 15), /* M3 = len */ |
| BPF_STMT(BPF_LDX | BPF_MEM, 1), |
| BPF_STMT(BPF_LD | BPF_MEM, 2), |
| BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0), |
| BPF_STMT(BPF_LDX | BPF_MEM, 15), |
| BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } } |
| }, |
| { |
| "JEQ", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| BPF_STMT(BPF_RET | BPF_K, MAX_K) |
| }, |
| CLASSIC, |
| { 3, 3, 3, 3, 3 }, |
| { { 1, 0 }, { 3, 1 }, { 4, MAX_K } }, |
| }, |
| { |
| "JGT", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2), |
| BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1), |
| BPF_STMT(BPF_RET | BPF_K, 1), |
| BPF_STMT(BPF_RET | BPF_K, MAX_K) |
| }, |
| CLASSIC, |
| { 4, 4, 4, 3, 3 }, |
| { { 2, 0 }, { 3, 1 }, { 4, MAX_K } }, |
| }, |
| { |
| "JGE", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K), |
| BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 10), |
| BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 20), |
| BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 40), |
| BPF_STMT(BPF_RET | BPF_K, MAX_K) |
| }, |
| CLASSIC, |
| { 1, 2, 3, 4, 5 }, |
| { { 1, 20 }, { 3, 40 }, { 5, MAX_K } }, |
| }, |
| { |
| "JSET", |
| .u.insns = { |
| BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0), |
| BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1), |
| BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0), |
| BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0), |
| BPF_STMT(BPF_LDX | BPF_LEN, 0), |
| BPF_STMT(BPF_MISC | BPF_TXA, 0), |
| BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0), |
| BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1), |
| BPF_STMT(BPF_RET | BPF_K, 10), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1), |
| BPF_STMT(BPF_RET | BPF_K, 20), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 30), |
| BPF_STMT(BPF_RET | BPF_K, MAX_K) |
| }, |
| CLASSIC, |
| { 0, 0xAA, 0x55, 1 }, |
| { { 4, 10 }, { 5, 20 }, { 6, MAX_K } }, |
| }, |
| { |
| "tcpdump port 22", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */ |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */ |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0), |
| BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14), |
| BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1), |
| BPF_STMT(BPF_RET | BPF_K, 0xffff), |
| BPF_STMT(BPF_RET | BPF_K, 0), |
| }, |
| CLASSIC, |
| /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800) |
| * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.], |
| * seq 1305692979:1305693027, ack 3650467037, win 65535, |
| * options [nop,nop,TS val 2502645400 ecr 3971138], length 48 |
| */ |
| { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6, |
| 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76, |
| 0x08, 0x00, |
| 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5, |
| 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */ |
| 0x0a, 0x01, 0x01, 0x95, /* ip src */ |
| 0x0a, 0x01, 0x02, 0x0a, /* ip dst */ |
| 0xc2, 0x24, |
| 0x00, 0x16 /* dst port */ }, |
| { { 10, 0 }, { 30, 0 }, { 100, 65535 } }, |
| }, |
| { |
| "tcpdump complex", |
| .u.insns = { |
| /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] - |
| * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and |
| * (len > 115 or len < 30000000000)' -d |
| */ |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20), |
| BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0), |
| BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14), |
| BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16), |
| BPF_STMT(BPF_ST, 1), |
| BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14), |
| BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf), |
| BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */ |
| BPF_STMT(BPF_LD | BPF_MEM, 1), |
| BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0), |
| BPF_STMT(BPF_ST, 5), |
| BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14), |
| BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26), |
| BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0), |
| BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2), |
| BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */ |
| BPF_STMT(BPF_LD | BPF_MEM, 5), |
| BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0), |
| BPF_STMT(BPF_LD | BPF_LEN, 0), |
| BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0), |
| BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0xffff), |
| BPF_STMT(BPF_RET | BPF_K, 0), |
| }, |
| CLASSIC, |
| { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6, |
| 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76, |
| 0x08, 0x00, |
| 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5, |
| 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */ |
| 0x0a, 0x01, 0x01, 0x95, /* ip src */ |
| 0x0a, 0x01, 0x02, 0x0a, /* ip dst */ |
| 0xc2, 0x24, |
| 0x00, 0x16 /* dst port */ }, |
| { { 10, 0 }, { 30, 0 }, { 100, 65535 } }, |
| }, |
| { |
| "RET_A", |
| .u.insns = { |
| /* check that unitialized X and A contain zeros */ |
| BPF_STMT(BPF_MISC | BPF_TXA, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0) |
| }, |
| CLASSIC, |
| { }, |
| { {1, 0}, {2, 0} }, |
| }, |
| { |
| "INT: ADD trivial", |
| .u.insns_int = { |
| BPF_ALU64_IMM(BPF_MOV, R1, 1), |
| BPF_ALU64_IMM(BPF_ADD, R1, 2), |
| BPF_ALU64_IMM(BPF_MOV, R2, 3), |
| BPF_ALU64_REG(BPF_SUB, R1, R2), |
| BPF_ALU64_IMM(BPF_ADD, R1, -1), |
| BPF_ALU64_IMM(BPF_MUL, R1, 3), |
| BPF_ALU64_REG(BPF_MOV, R0, R1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 0xfffffffd } } |
| }, |
| { |
| "INT: MUL_X", |
| .u.insns_int = { |
| BPF_ALU64_IMM(BPF_MOV, R0, -1), |
| BPF_ALU64_IMM(BPF_MOV, R1, -1), |
| BPF_ALU64_IMM(BPF_MOV, R2, 3), |
| BPF_ALU64_REG(BPF_MUL, R1, R2), |
| BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_IMM(BPF_MOV, R0, 1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 1 } } |
| }, |
| { |
| "INT: MUL_X2", |
| .u.insns_int = { |
| BPF_ALU32_IMM(BPF_MOV, R0, -1), |
| BPF_ALU32_IMM(BPF_MOV, R1, -1), |
| BPF_ALU32_IMM(BPF_MOV, R2, 3), |
| BPF_ALU64_REG(BPF_MUL, R1, R2), |
| BPF_ALU64_IMM(BPF_RSH, R1, 8), |
| BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_IMM(BPF_MOV, R0, 1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 1 } } |
| }, |
| { |
| "INT: MUL32_X", |
| .u.insns_int = { |
| BPF_ALU32_IMM(BPF_MOV, R0, -1), |
| BPF_ALU64_IMM(BPF_MOV, R1, -1), |
| BPF_ALU32_IMM(BPF_MOV, R2, 3), |
| BPF_ALU32_REG(BPF_MUL, R1, R2), |
| BPF_ALU64_IMM(BPF_RSH, R1, 8), |
| BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_IMM(BPF_MOV, R0, 1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 1 } } |
| }, |
| { |
| /* Have to test all register combinations, since |
| * JITing of different registers will produce |
| * different asm code. |
| */ |
| "INT: ADD 64-bit", |
| .u.insns_int = { |
| BPF_ALU64_IMM(BPF_MOV, R0, 0), |
| BPF_ALU64_IMM(BPF_MOV, R1, 1), |
| BPF_ALU64_IMM(BPF_MOV, R2, 2), |
| BPF_ALU64_IMM(BPF_MOV, R3, 3), |
| BPF_ALU64_IMM(BPF_MOV, R4, 4), |
| BPF_ALU64_IMM(BPF_MOV, R5, 5), |
| BPF_ALU64_IMM(BPF_MOV, R6, 6), |
| BPF_ALU64_IMM(BPF_MOV, R7, 7), |
| BPF_ALU64_IMM(BPF_MOV, R8, 8), |
| BPF_ALU64_IMM(BPF_MOV, R9, 9), |
| BPF_ALU64_IMM(BPF_ADD, R0, 20), |
| BPF_ALU64_IMM(BPF_ADD, R1, 20), |
| BPF_ALU64_IMM(BPF_ADD, R2, 20), |
| BPF_ALU64_IMM(BPF_ADD, R3, 20), |
| BPF_ALU64_IMM(BPF_ADD, R4, 20), |
| BPF_ALU64_IMM(BPF_ADD, R5, 20), |
| BPF_ALU64_IMM(BPF_ADD, R6, 20), |
| BPF_ALU64_IMM(BPF_ADD, R7, 20), |
| BPF_ALU64_IMM(BPF_ADD, R8, 20), |
| BPF_ALU64_IMM(BPF_ADD, R9, 20), |
| BPF_ALU64_IMM(BPF_SUB, R0, 10), |
| BPF_ALU64_IMM(BPF_SUB, R1, 10), |
| BPF_ALU64_IMM(BPF_SUB, R2, 10), |
| BPF_ALU64_IMM(BPF_SUB, R3, 10), |
| BPF_ALU64_IMM(BPF_SUB, R4, 10), |
| BPF_ALU64_IMM(BPF_SUB, R5, 10), |
| BPF_ALU64_IMM(BPF_SUB, R6, 10), |
| BPF_ALU64_IMM(BPF_SUB, R7, 10), |
| BPF_ALU64_IMM(BPF_SUB, R8, 10), |
| BPF_ALU64_IMM(BPF_SUB, R9, 10), |
| BPF_ALU64_REG(BPF_ADD, R0, R0), |
| BPF_ALU64_REG(BPF_ADD, R0, R1), |
| BPF_ALU64_REG(BPF_ADD, R0, R2), |
| BPF_ALU64_REG(BPF_ADD, R0, R3), |
| BPF_ALU64_REG(BPF_ADD, R0, R4), |
| BPF_ALU64_REG(BPF_ADD, R0, R5), |
| BPF_ALU64_REG(BPF_ADD, R0, R6), |
| BPF_ALU64_REG(BPF_ADD, R0, R7), |
| BPF_ALU64_REG(BPF_ADD, R0, R8), |
| BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */ |
| BPF_JMP_IMM(BPF_JEQ, R0, 155, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R1, R0), |
| BPF_ALU64_REG(BPF_ADD, R1, R1), |
| BPF_ALU64_REG(BPF_ADD, R1, R2), |
| BPF_ALU64_REG(BPF_ADD, R1, R3), |
| BPF_ALU64_REG(BPF_ADD, R1, R4), |
| BPF_ALU64_REG(BPF_ADD, R1, R5), |
| BPF_ALU64_REG(BPF_ADD, R1, R6), |
| BPF_ALU64_REG(BPF_ADD, R1, R7), |
| BPF_ALU64_REG(BPF_ADD, R1, R8), |
| BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */ |
| BPF_JMP_IMM(BPF_JEQ, R1, 456, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R2, R0), |
| BPF_ALU64_REG(BPF_ADD, R2, R1), |
| BPF_ALU64_REG(BPF_ADD, R2, R2), |
| BPF_ALU64_REG(BPF_ADD, R2, R3), |
| BPF_ALU64_REG(BPF_ADD, R2, R4), |
| BPF_ALU64_REG(BPF_ADD, R2, R5), |
| BPF_ALU64_REG(BPF_ADD, R2, R6), |
| BPF_ALU64_REG(BPF_ADD, R2, R7), |
| BPF_ALU64_REG(BPF_ADD, R2, R8), |
| BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */ |
| BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R3, R0), |
| BPF_ALU64_REG(BPF_ADD, R3, R1), |
| BPF_ALU64_REG(BPF_ADD, R3, R2), |
| BPF_ALU64_REG(BPF_ADD, R3, R3), |
| BPF_ALU64_REG(BPF_ADD, R3, R4), |
| BPF_ALU64_REG(BPF_ADD, R3, R5), |
| BPF_ALU64_REG(BPF_ADD, R3, R6), |
| BPF_ALU64_REG(BPF_ADD, R3, R7), |
| BPF_ALU64_REG(BPF_ADD, R3, R8), |
| BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */ |
| BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R4, R0), |
| BPF_ALU64_REG(BPF_ADD, R4, R1), |
| BPF_ALU64_REG(BPF_ADD, R4, R2), |
| BPF_ALU64_REG(BPF_ADD, R4, R3), |
| BPF_ALU64_REG(BPF_ADD, R4, R4), |
| BPF_ALU64_REG(BPF_ADD, R4, R5), |
| BPF_ALU64_REG(BPF_ADD, R4, R6), |
| BPF_ALU64_REG(BPF_ADD, R4, R7), |
| BPF_ALU64_REG(BPF_ADD, R4, R8), |
| BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */ |
| BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R5, R0), |
| BPF_ALU64_REG(BPF_ADD, R5, R1), |
| BPF_ALU64_REG(BPF_ADD, R5, R2), |
| BPF_ALU64_REG(BPF_ADD, R5, R3), |
| BPF_ALU64_REG(BPF_ADD, R5, R4), |
| BPF_ALU64_REG(BPF_ADD, R5, R5), |
| BPF_ALU64_REG(BPF_ADD, R5, R6), |
| BPF_ALU64_REG(BPF_ADD, R5, R7), |
| BPF_ALU64_REG(BPF_ADD, R5, R8), |
| BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */ |
| BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R6, R0), |
| BPF_ALU64_REG(BPF_ADD, R6, R1), |
| BPF_ALU64_REG(BPF_ADD, R6, R2), |
| BPF_ALU64_REG(BPF_ADD, R6, R3), |
| BPF_ALU64_REG(BPF_ADD, R6, R4), |
| BPF_ALU64_REG(BPF_ADD, R6, R5), |
| BPF_ALU64_REG(BPF_ADD, R6, R6), |
| BPF_ALU64_REG(BPF_ADD, R6, R7), |
| BPF_ALU64_REG(BPF_ADD, R6, R8), |
| BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */ |
| BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R7, R0), |
| BPF_ALU64_REG(BPF_ADD, R7, R1), |
| BPF_ALU64_REG(BPF_ADD, R7, R2), |
| BPF_ALU64_REG(BPF_ADD, R7, R3), |
| BPF_ALU64_REG(BPF_ADD, R7, R4), |
| BPF_ALU64_REG(BPF_ADD, R7, R5), |
| BPF_ALU64_REG(BPF_ADD, R7, R6), |
| BPF_ALU64_REG(BPF_ADD, R7, R7), |
| BPF_ALU64_REG(BPF_ADD, R7, R8), |
| BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */ |
| BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R8, R0), |
| BPF_ALU64_REG(BPF_ADD, R8, R1), |
| BPF_ALU64_REG(BPF_ADD, R8, R2), |
| BPF_ALU64_REG(BPF_ADD, R8, R3), |
| BPF_ALU64_REG(BPF_ADD, R8, R4), |
| BPF_ALU64_REG(BPF_ADD, R8, R5), |
| BPF_ALU64_REG(BPF_ADD, R8, R6), |
| BPF_ALU64_REG(BPF_ADD, R8, R7), |
| BPF_ALU64_REG(BPF_ADD, R8, R8), |
| BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */ |
| BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_REG(BPF_ADD, R9, R0), |
| BPF_ALU64_REG(BPF_ADD, R9, R1), |
| BPF_ALU64_REG(BPF_ADD, R9, R2), |
| BPF_ALU64_REG(BPF_ADD, R9, R3), |
| BPF_ALU64_REG(BPF_ADD, R9, R4), |
| BPF_ALU64_REG(BPF_ADD, R9, R5), |
| BPF_ALU64_REG(BPF_ADD, R9, R6), |
| BPF_ALU64_REG(BPF_ADD, R9, R7), |
| BPF_ALU64_REG(BPF_ADD, R9, R8), |
| BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */ |
| BPF_ALU64_REG(BPF_MOV, R0, R9), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 2957380 } } |
| }, |
| { |
| "INT: ADD 32-bit", |
| .u.insns_int = { |
| BPF_ALU32_IMM(BPF_MOV, R0, 20), |
| BPF_ALU32_IMM(BPF_MOV, R1, 1), |
| BPF_ALU32_IMM(BPF_MOV, R2, 2), |
| BPF_ALU32_IMM(BPF_MOV, R3, 3), |
| BPF_ALU32_IMM(BPF_MOV, R4, 4), |
| BPF_ALU32_IMM(BPF_MOV, R5, 5), |
| BPF_ALU32_IMM(BPF_MOV, R6, 6), |
| BPF_ALU32_IMM(BPF_MOV, R7, 7), |
| BPF_ALU32_IMM(BPF_MOV, R8, 8), |
| BPF_ALU32_IMM(BPF_MOV, R9, 9), |
| BPF_ALU64_IMM(BPF_ADD, R1, 10), |
| BPF_ALU64_IMM(BPF_ADD, R2, 10), |
| BPF_ALU64_IMM(BPF_ADD, R3, 10), |
| BPF_ALU64_IMM(BPF_ADD, R4, 10), |
| BPF_ALU64_IMM(BPF_ADD, R5, 10), |
| BPF_ALU64_IMM(BPF_ADD, R6, 10), |
| BPF_ALU64_IMM(BPF_ADD, R7, 10), |
| BPF_ALU64_IMM(BPF_ADD, R8, 10), |
| BPF_ALU64_IMM(BPF_ADD, R9, 10), |
| BPF_ALU32_REG(BPF_ADD, R0, R1), |
| BPF_ALU32_REG(BPF_ADD, R0, R2), |
| BPF_ALU32_REG(BPF_ADD, R0, R3), |
| BPF_ALU32_REG(BPF_ADD, R0, R4), |
| BPF_ALU32_REG(BPF_ADD, R0, R5), |
| BPF_ALU32_REG(BPF_ADD, R0, R6), |
| BPF_ALU32_REG(BPF_ADD, R0, R7), |
| BPF_ALU32_REG(BPF_ADD, R0, R8), |
| BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */ |
| BPF_JMP_IMM(BPF_JEQ, R0, 155, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R1, R0), |
| BPF_ALU32_REG(BPF_ADD, R1, R1), |
| BPF_ALU32_REG(BPF_ADD, R1, R2), |
| BPF_ALU32_REG(BPF_ADD, R1, R3), |
| BPF_ALU32_REG(BPF_ADD, R1, R4), |
| BPF_ALU32_REG(BPF_ADD, R1, R5), |
| BPF_ALU32_REG(BPF_ADD, R1, R6), |
| BPF_ALU32_REG(BPF_ADD, R1, R7), |
| BPF_ALU32_REG(BPF_ADD, R1, R8), |
| BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */ |
| BPF_JMP_IMM(BPF_JEQ, R1, 456, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R2, R0), |
| BPF_ALU32_REG(BPF_ADD, R2, R1), |
| BPF_ALU32_REG(BPF_ADD, R2, R2), |
| BPF_ALU32_REG(BPF_ADD, R2, R3), |
| BPF_ALU32_REG(BPF_ADD, R2, R4), |
| BPF_ALU32_REG(BPF_ADD, R2, R5), |
| BPF_ALU32_REG(BPF_ADD, R2, R6), |
| BPF_ALU32_REG(BPF_ADD, R2, R7), |
| BPF_ALU32_REG(BPF_ADD, R2, R8), |
| BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */ |
| BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R3, R0), |
| BPF_ALU32_REG(BPF_ADD, R3, R1), |
| BPF_ALU32_REG(BPF_ADD, R3, R2), |
| BPF_ALU32_REG(BPF_ADD, R3, R3), |
| BPF_ALU32_REG(BPF_ADD, R3, R4), |
| BPF_ALU32_REG(BPF_ADD, R3, R5), |
| BPF_ALU32_REG(BPF_ADD, R3, R6), |
| BPF_ALU32_REG(BPF_ADD, R3, R7), |
| BPF_ALU32_REG(BPF_ADD, R3, R8), |
| BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */ |
| BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R4, R0), |
| BPF_ALU32_REG(BPF_ADD, R4, R1), |
| BPF_ALU32_REG(BPF_ADD, R4, R2), |
| BPF_ALU32_REG(BPF_ADD, R4, R3), |
| BPF_ALU32_REG(BPF_ADD, R4, R4), |
| BPF_ALU32_REG(BPF_ADD, R4, R5), |
| BPF_ALU32_REG(BPF_ADD, R4, R6), |
| BPF_ALU32_REG(BPF_ADD, R4, R7), |
| BPF_ALU32_REG(BPF_ADD, R4, R8), |
| BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */ |
| BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R5, R0), |
| BPF_ALU32_REG(BPF_ADD, R5, R1), |
| BPF_ALU32_REG(BPF_ADD, R5, R2), |
| BPF_ALU32_REG(BPF_ADD, R5, R3), |
| BPF_ALU32_REG(BPF_ADD, R5, R4), |
| BPF_ALU32_REG(BPF_ADD, R5, R5), |
| BPF_ALU32_REG(BPF_ADD, R5, R6), |
| BPF_ALU32_REG(BPF_ADD, R5, R7), |
| BPF_ALU32_REG(BPF_ADD, R5, R8), |
| BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */ |
| BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R6, R0), |
| BPF_ALU32_REG(BPF_ADD, R6, R1), |
| BPF_ALU32_REG(BPF_ADD, R6, R2), |
| BPF_ALU32_REG(BPF_ADD, R6, R3), |
| BPF_ALU32_REG(BPF_ADD, R6, R4), |
| BPF_ALU32_REG(BPF_ADD, R6, R5), |
| BPF_ALU32_REG(BPF_ADD, R6, R6), |
| BPF_ALU32_REG(BPF_ADD, R6, R7), |
| BPF_ALU32_REG(BPF_ADD, R6, R8), |
| BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */ |
| BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R7, R0), |
| BPF_ALU32_REG(BPF_ADD, R7, R1), |
| BPF_ALU32_REG(BPF_ADD, R7, R2), |
| BPF_ALU32_REG(BPF_ADD, R7, R3), |
| BPF_ALU32_REG(BPF_ADD, R7, R4), |
| BPF_ALU32_REG(BPF_ADD, R7, R5), |
| BPF_ALU32_REG(BPF_ADD, R7, R6), |
| BPF_ALU32_REG(BPF_ADD, R7, R7), |
| BPF_ALU32_REG(BPF_ADD, R7, R8), |
| BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */ |
| BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R8, R0), |
| BPF_ALU32_REG(BPF_ADD, R8, R1), |
| BPF_ALU32_REG(BPF_ADD, R8, R2), |
| BPF_ALU32_REG(BPF_ADD, R8, R3), |
| BPF_ALU32_REG(BPF_ADD, R8, R4), |
| BPF_ALU32_REG(BPF_ADD, R8, R5), |
| BPF_ALU32_REG(BPF_ADD, R8, R6), |
| BPF_ALU32_REG(BPF_ADD, R8, R7), |
| BPF_ALU32_REG(BPF_ADD, R8, R8), |
| BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */ |
| BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU32_REG(BPF_ADD, R9, R0), |
| BPF_ALU32_REG(BPF_ADD, R9, R1), |
| BPF_ALU32_REG(BPF_ADD, R9, R2), |
| BPF_ALU32_REG(BPF_ADD, R9, R3), |
| BPF_ALU32_REG(BPF_ADD, R9, R4), |
| BPF_ALU32_REG(BPF_ADD, R9, R5), |
| BPF_ALU32_REG(BPF_ADD, R9, R6), |
| BPF_ALU32_REG(BPF_ADD, R9, R7), |
| BPF_ALU32_REG(BPF_ADD, R9, R8), |
| BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */ |
| BPF_ALU32_REG(BPF_MOV, R0, R9), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 2957380 } } |
| }, |
	{ /* Mainly checking JIT here. */
		/* Exercises SUB with every dst/src register pairing
		 * (each register subtracts every other one), including
		 * the self-subtraction R0 -= R0 at the start.
		 */
		"INT: SUB",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			/* R0 = -(1+2+...+9) - 10 = -55 */
			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R0),
			BPF_ALU64_REG(BPF_SUB, R1, R2),
			BPF_ALU64_REG(BPF_SUB, R1, R3),
			BPF_ALU64_REG(BPF_SUB, R1, R4),
			BPF_ALU64_REG(BPF_SUB, R1, R5),
			BPF_ALU64_REG(BPF_SUB, R1, R6),
			BPF_ALU64_REG(BPF_SUB, R1, R7),
			BPF_ALU64_REG(BPF_SUB, R1, R8),
			BPF_ALU64_REG(BPF_SUB, R1, R9),
			BPF_ALU64_IMM(BPF_SUB, R1, 10),
			BPF_ALU64_REG(BPF_SUB, R2, R0),
			BPF_ALU64_REG(BPF_SUB, R2, R1),
			BPF_ALU64_REG(BPF_SUB, R2, R3),
			BPF_ALU64_REG(BPF_SUB, R2, R4),
			BPF_ALU64_REG(BPF_SUB, R2, R5),
			BPF_ALU64_REG(BPF_SUB, R2, R6),
			BPF_ALU64_REG(BPF_SUB, R2, R7),
			BPF_ALU64_REG(BPF_SUB, R2, R8),
			BPF_ALU64_REG(BPF_SUB, R2, R9),
			BPF_ALU64_IMM(BPF_SUB, R2, 10),
			BPF_ALU64_REG(BPF_SUB, R3, R0),
			BPF_ALU64_REG(BPF_SUB, R3, R1),
			BPF_ALU64_REG(BPF_SUB, R3, R2),
			BPF_ALU64_REG(BPF_SUB, R3, R4),
			BPF_ALU64_REG(BPF_SUB, R3, R5),
			BPF_ALU64_REG(BPF_SUB, R3, R6),
			BPF_ALU64_REG(BPF_SUB, R3, R7),
			BPF_ALU64_REG(BPF_SUB, R3, R8),
			BPF_ALU64_REG(BPF_SUB, R3, R9),
			BPF_ALU64_IMM(BPF_SUB, R3, 10),
			BPF_ALU64_REG(BPF_SUB, R4, R0),
			BPF_ALU64_REG(BPF_SUB, R4, R1),
			BPF_ALU64_REG(BPF_SUB, R4, R2),
			BPF_ALU64_REG(BPF_SUB, R4, R3),
			BPF_ALU64_REG(BPF_SUB, R4, R5),
			BPF_ALU64_REG(BPF_SUB, R4, R6),
			BPF_ALU64_REG(BPF_SUB, R4, R7),
			BPF_ALU64_REG(BPF_SUB, R4, R8),
			BPF_ALU64_REG(BPF_SUB, R4, R9),
			BPF_ALU64_IMM(BPF_SUB, R4, 10),
			BPF_ALU64_REG(BPF_SUB, R5, R0),
			BPF_ALU64_REG(BPF_SUB, R5, R1),
			BPF_ALU64_REG(BPF_SUB, R5, R2),
			BPF_ALU64_REG(BPF_SUB, R5, R3),
			BPF_ALU64_REG(BPF_SUB, R5, R4),
			BPF_ALU64_REG(BPF_SUB, R5, R6),
			BPF_ALU64_REG(BPF_SUB, R5, R7),
			BPF_ALU64_REG(BPF_SUB, R5, R8),
			BPF_ALU64_REG(BPF_SUB, R5, R9),
			BPF_ALU64_IMM(BPF_SUB, R5, 10),
			BPF_ALU64_REG(BPF_SUB, R6, R0),
			BPF_ALU64_REG(BPF_SUB, R6, R1),
			BPF_ALU64_REG(BPF_SUB, R6, R2),
			BPF_ALU64_REG(BPF_SUB, R6, R3),
			BPF_ALU64_REG(BPF_SUB, R6, R4),
			BPF_ALU64_REG(BPF_SUB, R6, R5),
			BPF_ALU64_REG(BPF_SUB, R6, R7),
			BPF_ALU64_REG(BPF_SUB, R6, R8),
			BPF_ALU64_REG(BPF_SUB, R6, R9),
			BPF_ALU64_IMM(BPF_SUB, R6, 10),
			BPF_ALU64_REG(BPF_SUB, R7, R0),
			BPF_ALU64_REG(BPF_SUB, R7, R1),
			BPF_ALU64_REG(BPF_SUB, R7, R2),
			BPF_ALU64_REG(BPF_SUB, R7, R3),
			BPF_ALU64_REG(BPF_SUB, R7, R4),
			BPF_ALU64_REG(BPF_SUB, R7, R5),
			BPF_ALU64_REG(BPF_SUB, R7, R6),
			BPF_ALU64_REG(BPF_SUB, R7, R8),
			BPF_ALU64_REG(BPF_SUB, R7, R9),
			BPF_ALU64_IMM(BPF_SUB, R7, 10),
			BPF_ALU64_REG(BPF_SUB, R8, R0),
			BPF_ALU64_REG(BPF_SUB, R8, R1),
			BPF_ALU64_REG(BPF_SUB, R8, R2),
			BPF_ALU64_REG(BPF_SUB, R8, R3),
			BPF_ALU64_REG(BPF_SUB, R8, R4),
			BPF_ALU64_REG(BPF_SUB, R8, R5),
			BPF_ALU64_REG(BPF_SUB, R8, R6),
			BPF_ALU64_REG(BPF_SUB, R8, R7),
			BPF_ALU64_REG(BPF_SUB, R8, R9),
			BPF_ALU64_IMM(BPF_SUB, R8, 10),
			BPF_ALU64_REG(BPF_SUB, R9, R0),
			BPF_ALU64_REG(BPF_SUB, R9, R1),
			BPF_ALU64_REG(BPF_SUB, R9, R2),
			BPF_ALU64_REG(BPF_SUB, R9, R3),
			BPF_ALU64_REG(BPF_SUB, R9, R4),
			BPF_ALU64_REG(BPF_SUB, R9, R5),
			BPF_ALU64_REG(BPF_SUB, R9, R6),
			BPF_ALU64_REG(BPF_SUB, R9, R7),
			BPF_ALU64_REG(BPF_SUB, R9, R8),
			BPF_ALU64_IMM(BPF_SUB, R9, 10),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			BPF_ALU64_IMM(BPF_NEG, R0, 0),	/* also covers NEG */
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 11 } }
	},
	{ /* Mainly checking JIT here. */
		/* Checks that both "SUB Rn, Rn" and "XOR Rn, Rn" zero a
		 * register for every register, since JITs commonly emit a
		 * special zeroing idiom for these; interleaved MOVs set up
		 * non-zero values that must not leak through.
		 */
		"INT: XOR",
		.u.insns_int = {
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_XOR, R1, R1),
			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R2, R2),
			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R2, R2),
			BPF_ALU64_REG(BPF_XOR, R3, R3),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R3, R3),
			BPF_ALU64_REG(BPF_XOR, R4, R4),
			BPF_ALU64_IMM(BPF_MOV, R2, 1),
			BPF_ALU64_IMM(BPF_MOV, R5, -1),
			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R4, R4),
			BPF_ALU64_REG(BPF_XOR, R5, R5),
			BPF_ALU64_IMM(BPF_MOV, R3, 1),
			BPF_ALU64_IMM(BPF_MOV, R7, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R5, 1),
			BPF_ALU64_REG(BPF_SUB, R5, R5),
			BPF_ALU64_REG(BPF_XOR, R6, R6),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R8, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R6, R6),
			BPF_ALU64_REG(BPF_XOR, R7, R7),
			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R7, R7),
			BPF_ALU64_REG(BPF_XOR, R8, R8),
			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R8, R8),
			BPF_ALU64_REG(BPF_XOR, R9, R9),
			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R9, R9),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 1),	/* all checks passed */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{ /* Mainly checking JIT here. */
		/* 64-bit multiply chains; the later stages deliberately
		 * overflow 32 bits so both halves of the product are
		 * checked (via RSH/LSH/ARSH of the result).
		 */
		"INT: MUL",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 11),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_MUL, R0, R0),
			BPF_ALU64_REG(BPF_MUL, R0, R1),
			BPF_ALU64_REG(BPF_MUL, R0, R2),
			BPF_ALU64_REG(BPF_MUL, R0, R3),
			BPF_ALU64_REG(BPF_MUL, R0, R4),
			BPF_ALU64_REG(BPF_MUL, R0, R5),
			BPF_ALU64_REG(BPF_MUL, R0, R6),
			BPF_ALU64_REG(BPF_MUL, R0, R7),
			BPF_ALU64_REG(BPF_MUL, R0, R8),
			BPF_ALU64_REG(BPF_MUL, R0, R9),
			BPF_ALU64_IMM(BPF_MUL, R0, 10),
			/* R0 = 11^2 * 9! * 10 = 439084800, still < 2^32 */
			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R1, R0),
			BPF_ALU64_REG(BPF_MUL, R1, R2),
			BPF_ALU64_REG(BPF_MUL, R1, R3),
			BPF_ALU64_REG(BPF_MUL, R1, R4),
			BPF_ALU64_REG(BPF_MUL, R1, R5),
			BPF_ALU64_REG(BPF_MUL, R1, R6),
			BPF_ALU64_REG(BPF_MUL, R1, R7),
			BPF_ALU64_REG(BPF_MUL, R1, R8),
			BPF_ALU64_REG(BPF_MUL, R1, R9),
			BPF_ALU64_IMM(BPF_MUL, R1, 10),
			/* check upper 32 bits of the 64-bit product */
			BPF_ALU64_REG(BPF_MOV, R2, R1),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
			BPF_EXIT_INSN(),
			/* sign-extend the lower 32 bits and compare */
			BPF_ALU64_IMM(BPF_LSH, R1, 32),
			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R2, R0),
			BPF_ALU64_REG(BPF_MUL, R2, R1),
			BPF_ALU64_REG(BPF_MUL, R2, R3),
			BPF_ALU64_REG(BPF_MUL, R2, R4),
			BPF_ALU64_REG(BPF_MUL, R2, R5),
			BPF_ALU64_REG(BPF_MUL, R2, R6),
			BPF_ALU64_REG(BPF_MUL, R2, R7),
			BPF_ALU64_REG(BPF_MUL, R2, R8),
			BPF_ALU64_REG(BPF_MUL, R2, R9),
			BPF_ALU64_IMM(BPF_MUL, R2, 10),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_ALU64_REG(BPF_MOV, R0, R2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x35d97ef2 } }
	},
	{ /* Mainly checking JIT here. */
		/* Propagate all-ones through every register with 64-bit
		 * MOVs, then clear each with a 64-bit MOV imm 0; the sum
		 * must be exactly the final 0xfefe (no stale bits).
		 */
		"MOV REG64",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 0),
			BPF_ALU64_IMM(BPF_MOV, R2, 0),
			BPF_ALU64_IMM(BPF_MOV, R3, 0),
			BPF_ALU64_IMM(BPF_MOV, R4, 0),
			BPF_ALU64_IMM(BPF_MOV, R5, 0),
			BPF_ALU64_IMM(BPF_MOV, R6, 0),
			BPF_ALU64_IMM(BPF_MOV, R7, 0),
			BPF_ALU64_IMM(BPF_MOV, R8, 0),
			BPF_ALU64_IMM(BPF_MOV, R9, 0),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
	{ /* Mainly checking JIT here. */
		/* Same as "MOV REG64" but the clears are 32-bit MOVs:
		 * a 32-bit MOV must zero-extend, wiping the upper 32
		 * all-ones bits as well, so the sum is again 0xfefe.
		 */
		"MOV REG32",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_ALU32_IMM(BPF_MOV, R1, 0),
			BPF_ALU32_IMM(BPF_MOV, R2, 0),
			BPF_ALU32_IMM(BPF_MOV, R3, 0),
			BPF_ALU32_IMM(BPF_MOV, R4, 0),
			BPF_ALU32_IMM(BPF_MOV, R5, 0),
			BPF_ALU32_IMM(BPF_MOV, R6, 0),
			BPF_ALU32_IMM(BPF_MOV, R7, 0),
			BPF_ALU32_IMM(BPF_MOV, R8, 0),
			BPF_ALU32_IMM(BPF_MOV, R9, 0),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
	{ /* Mainly checking JIT here. */
		/* Same pattern again, but clearing every register with the
		 * two-insn LD_IMM64 form; exercises the JIT's 64-bit
		 * immediate-load path for each destination register.
		 */
		"LD IMM64",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_LD_IMM64(R0, 0x0LL),
			BPF_LD_IMM64(R1, 0x0LL),
			BPF_LD_IMM64(R2, 0x0LL),
			BPF_LD_IMM64(R3, 0x0LL),
			BPF_LD_IMM64(R4, 0x0LL),
			BPF_LD_IMM64(R5, 0x0LL),
			BPF_LD_IMM64(R6, 0x0LL),
			BPF_LD_IMM64(R7, 0x0LL),
			BPF_LD_IMM64(R8, 0x0LL),
			BPF_LD_IMM64(R9, 0x0LL),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
| { |
| "INT: ALU MIX", |
| .u.insns_int = { |
| BPF_ALU64_IMM(BPF_MOV, R0, 11), |
| BPF_ALU64_IMM(BPF_ADD, R0, -1), |
| BPF_ALU64_IMM(BPF_MOV, R2, 2), |
| BPF_ALU64_IMM(BPF_XOR, R2, 3), |
| BPF_ALU64_REG(BPF_DIV, R0, R2), |
| BPF_JMP_IMM(BPF_JEQ, R0, 10, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_IMM(BPF_MOD, R0, 3), |
| BPF_JMP_IMM(BPF_JEQ, R0, 1, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_IMM(BPF_MOV, R0, -1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, -1 } } |
| }, |
| { |
| "INT: shifts by register", |
| .u.insns_int = { |
| BPF_MOV64_IMM(R0, -1234), |
| BPF_MOV64_IMM(R1, 1), |
| BPF_ALU32_REG(BPF_RSH, R0, R1), |
| BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1), |
| BPF_EXIT_INSN(), |
| BPF_MOV64_IMM(R2, 1), |
| BPF_ALU64_REG(BPF_LSH, R0, R2), |
| BPF_MOV32_IMM(R4, -1234), |
| BPF_JMP_REG(BPF_JEQ, R0, R4, 1), |
| BPF_EXIT_INSN(), |
| BPF_ALU64_IMM(BPF_AND, R4, 63), |
| BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */ |
| BPF_MOV64_IMM(R3, 47), |
| BPF_ALU64_REG(BPF_ARSH, R0, R3), |
| BPF_JMP_IMM(BPF_JEQ, R0, -617, 1), |
| BPF_EXIT_INSN(), |
| BPF_MOV64_IMM(R2, 1), |
| BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */ |
| BPF_JMP_IMM(BPF_JEQ, R4, 92, 1), |
| BPF_EXIT_INSN(), |
| BPF_MOV64_IMM(R4, 4), |
| BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */ |
| BPF_JMP_IMM(BPF_JEQ, R4, 64, 1), |
| BPF_EXIT_INSN(), |
| BPF_MOV64_IMM(R4, 5), |
| BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */ |
| BPF_JMP_IMM(BPF_JEQ, R4, 160, 1), |
| BPF_EXIT_INSN(), |
| BPF_MOV64_IMM(R0, -1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, -1 } } |
| }, |
| { |
| "INT: DIV + ABS", |
| .u.insns_int = { |
| BPF_ALU64_REG(BPF_MOV, R6, R1), |
| BPF_LD_ABS(BPF_B, 3), |
| BPF_ALU64_IMM(BPF_MOV, R2, 2), |
| BPF_ALU32_REG(BPF_DIV, R0, R2), |
| BPF_ALU64_REG(BPF_MOV, R8, R0), |
| BPF_LD_ABS(BPF_B, 4), |
| BPF_ALU64_REG(BPF_ADD, R8, R0), |
| BPF_LD_IND(BPF_B, R8, -70), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { 10, 20, 30, 40, 50 }, |
| { { 4, 0 }, { 5, 10 } } |
| }, |
| { |
| "INT: DIV by zero", |
| .u.insns_int = { |
| BPF_ALU64_REG(BPF_MOV, R6, R1), |
| BPF_ALU64_IMM(BPF_MOV, R7, 0), |
| BPF_LD_ABS(BPF_B, 3), |
| BPF_ALU32_REG(BPF_DIV, R0, R7), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { 10, 20, 30, 40, 50 }, |
| { { 3, 0 }, { 4, 0 } } |
| }, |
| { |
| "check: missing ret", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_IMM, 1), |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { } |
| }, |
| { |
| "check: div_k_0", |
| .u.insns = { |
| BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0) |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { } |
| }, |
| { |
| "check: unknown insn", |
| .u.insns = { |
| /* seccomp insn, rejected in socket filter */ |
| BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0), |
| BPF_STMT(BPF_RET | BPF_K, 0) |
| }, |
| CLASSIC | FLAG_EXPECTED_FAIL, |
| { }, |
| { } |
| }, |
| { |
| "check: out of range spill/fill", |
| .u.insns = { |
| BPF_STMT(BPF_STX, 16), |
| BPF_STMT(BPF_RET | BPF_K, 0) |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { } |
| }, |
| { |
| "JUMPS + HOLES", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15), |
| BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15), |
| BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3), |
| BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2), |
| BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0), |
| BPF_STMT(BPF_RET | BPF_A, 0), |
| }, |
| CLASSIC, |
| { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8, |
| 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4, |
| 0x08, 0x00, |
| 0x45, 0x00, 0x00, 0x28, 0x00, 0x00, |
| 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */ |
| 0xc0, 0xa8, 0x33, 0x01, |
| 0xc0, 0xa8, 0x33, 0x02, |
| 0xbb, 0xb6, |
| 0xa9, 0xfa, |
| 0x00, 0x14, 0x00, 0x00, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, |
| 0xcc, 0xcc, 0xcc, 0xcc }, |
| { { 88, 0x001b } } |
| }, |
| { |
| "check: RET X", |
| .u.insns = { |
| BPF_STMT(BPF_RET | BPF_X, 0), |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { }, |
| }, |
| { |
| "check: LDX + RET X", |
| .u.insns = { |
| BPF_STMT(BPF_LDX | BPF_IMM, 42), |
| BPF_STMT(BPF_RET | BPF_X, 0), |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { }, |
| }, |
	{ /* Mainly checking JIT here. */
		/* Walks all 16 classic-BPF scratch slots M[0..15]:
		 * store X, load it back, increment via A, repeat.
		 * Starting at 100 and incrementing 16 times gives 116.
		 */
		"M[]: alt STX + LDX",
		.u.insns = {
			BPF_STMT(BPF_LDX | BPF_IMM, 100),
			BPF_STMT(BPF_STX, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 0),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 1),
			BPF_STMT(BPF_LDX | BPF_MEM, 1),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 2),
			BPF_STMT(BPF_LDX | BPF_MEM, 2),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 3),
			BPF_STMT(BPF_LDX | BPF_MEM, 3),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 4),
			BPF_STMT(BPF_LDX | BPF_MEM, 4),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 5),
			BPF_STMT(BPF_LDX | BPF_MEM, 5),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 6),
			BPF_STMT(BPF_LDX | BPF_MEM, 6),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 7),
			BPF_STMT(BPF_LDX | BPF_MEM, 7),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 8),
			BPF_STMT(BPF_LDX | BPF_MEM, 8),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 9),
			BPF_STMT(BPF_LDX | BPF_MEM, 9),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 10),
			BPF_STMT(BPF_LDX | BPF_MEM, 10),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 11),
			BPF_STMT(BPF_LDX | BPF_MEM, 11),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 12),
			BPF_STMT(BPF_LDX | BPF_MEM, 12),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 13),
			BPF_STMT(BPF_LDX | BPF_MEM, 13),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 14),
			BPF_STMT(BPF_LDX | BPF_MEM, 14),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_STX, 15),
			BPF_STMT(BPF_LDX | BPF_MEM, 15),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
			BPF_STMT(BPF_MISC | BPF_TAX, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC | FLAG_NO_DATA,
		{ },
		{ { 0, 116 } },
	},
	{ /* Mainly checking JIT here. */
		"M[]: full STX + full LDX",
		.u.insns = {
			/* Fill all 16 classic scratch slots M[0]..M[15]
			 * with distinct constants, each via X:
			 * LDX #imm then STX into the slot.
			 */
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
			BPF_STMT(BPF_STX, 0),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
			BPF_STMT(BPF_STX, 1),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
			BPF_STMT(BPF_STX, 2),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
			BPF_STMT(BPF_STX, 3),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
			BPF_STMT(BPF_STX, 4),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
			BPF_STMT(BPF_STX, 5),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
			BPF_STMT(BPF_STX, 6),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
			BPF_STMT(BPF_STX, 7),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
			BPF_STMT(BPF_STX, 8),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
			BPF_STMT(BPF_STX, 9),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
			BPF_STMT(BPF_STX, 10),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
			BPF_STMT(BPF_STX, 11),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
			BPF_STMT(BPF_STX, 12),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
			BPF_STMT(BPF_STX, 13),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
			BPF_STMT(BPF_STX, 14),
			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
			BPF_STMT(BPF_STX, 15),
			/* Read all 16 slots back and accumulate them in A:
			 * A = M[0], then A += M[1..15] via ADD|X. The expected
			 * result is the 32-bit wrapping sum of the 16 constants
			 * above (0x2a5a5e5), so every STX/LDX pair is verified.
			 */
			BPF_STMT(BPF_LDX | BPF_MEM, 0),
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 1),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 2),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 3),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 4),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 5),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 6),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 7),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 8),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 9),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 10),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 11),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 12),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 13),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 14),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_LDX | BPF_MEM, 15),
			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
			BPF_STMT(BPF_RET | BPF_A, 0),
		},
		CLASSIC | FLAG_NO_DATA,
		{ },
		{ { 0, 0x2a5a5e5 } },
	},
| { |
| "check: SKF_AD_MAX", |
| .u.insns = { |
| BPF_STMT(BPF_LD | BPF_W | BPF_ABS, |
| SKF_AD_OFF + SKF_AD_MAX), |
| BPF_STMT(BPF_RET | BPF_A, 0), |
| }, |
| CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL, |
| { }, |
| { }, |
| }, |
	{ /* Passes checker but fails during runtime. */
		"LD [SKF_AD_OFF-1]",
		.u.insns = {
			/* Absolute load just below the ancillary range:
			 * accepted by the checker, but the runtime load
			 * fails, so the program returns 0 instead of the
			 * RET #1 below (expected { 1, 0 }).
			 */
			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
				 SKF_AD_OFF - 1),
			BPF_STMT(BPF_RET | BPF_K, 1),
		},
		CLASSIC,
		{ },
		{ { 1, 0 } },
	},
| { |
| "load 64-bit immediate", |
| .u.insns_int = { |
| BPF_LD_IMM64(R1, 0x567800001234LL), |
| BPF_MOV64_REG(R2, R1), |
| BPF_MOV64_REG(R3, R2), |
| BPF_ALU64_IMM(BPF_RSH, R2, 32), |
| BPF_ALU64_IMM(BPF_LSH, R3, 32), |
| BPF_ALU64_IMM(BPF_RSH, R3, 32), |
| BPF_ALU64_IMM(BPF_MOV, R0, 0), |
| BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1), |
| BPF_EXIT_INSN(), |
| BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1), |
| BPF_EXIT_INSN(), |
| BPF_LD_IMM64(R0, 0x1ffffffffLL), |
| BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */ |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 1 } } |
| }, |
| { |
| "nmap reduced", |
| .u.insns_int = { |
| BPF_MOV64_REG(R6, R1), |
| BPF_LD_ABS(BPF_H, 12), |
| BPF_JMP_IMM(BPF_JNE, R0, 0x806, 28), |
| BPF_LD_ABS(BPF_H, 12), |
| BPF_JMP_IMM(BPF_JNE, R0, 0x806, 26), |
| BPF_MOV32_IMM(R0, 18), |
| BPF_STX_MEM(BPF_W, R10, R0, -64), |
| BPF_LDX_MEM(BPF_W, R7, R10, -64), |
| BPF_LD_IND(BPF_W, R7, 14), |
| BPF_STX_MEM(BPF_W, R10, R0, -60), |
| BPF_MOV32_IMM(R0, 280971478), |
| BPF_STX_MEM(BPF_W, R10, R0, -56), |
| BPF_LDX_MEM(BPF_W, R7, R10, -56), |
| BPF_LDX_MEM(BPF_W, R0, R10, -60), |
| BPF_ALU32_REG(BPF_SUB, R0, R7), |
| BPF_JMP_IMM(BPF_JNE, R0, 0, 15), |
| BPF_LD_ABS(BPF_H, 12), |
| BPF_JMP_IMM(BPF_JNE, R0, 0x806, 13), |
| BPF_MOV32_IMM(R0, 22), |
| BPF_STX_MEM(BPF_W, R10, R0, -56), |
| BPF_LDX_MEM(BPF_W, R7, R10, -56), |
| BPF_LD_IND(BPF_H, R7, 14), |
| BPF_STX_MEM(BPF_W, R10, R0, -52), |
| BPF_MOV32_IMM(R0, 17366), |
| BPF_STX_MEM(BPF_W, R10, R0, -48), |
| BPF_LDX_MEM(BPF_W, R7, R10, -48), |
| BPF_LDX_MEM(BPF_W, R0, R10, -52), |
| BPF_ALU32_REG(BPF_SUB, R0, R7), |
| BPF_JMP_IMM(BPF_JNE, R0, 0, 2), |
| BPF_MOV32_IMM(R0, 256), |
| BPF_EXIT_INSN(), |
| BPF_MOV32_IMM(R0, 0), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x08, 0x06, 0, 0, |
| 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6}, |
| { { 38, 256 } } |
| }, |
	/* BPF_ALU | BPF_MOV | BPF_X */
	{
		/* 32-bit register move of a small value. */
		"ALU_MOV_X: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		/* 32-bit register move of the maximum u32 value. */
		"ALU_MOV_X: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
			BPF_ALU32_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 64-bit register move of a small value. */
		"ALU64_MOV_X: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } } is below; here source is zero-extended */
		{ { 0, 2 } },
	},
	{
		/* 64-bit register move of a value with all low 32 bits set. */
		"ALU64_MOV_X: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	/* BPF_ALU | BPF_MOV | BPF_K */
	{
		/* 32-bit immediate move of a small value. */
		"ALU_MOV_K: dst = 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		/* 32-bit immediate move of the maximum u32 value. */
		"ALU_MOV_K: dst = 4294967295",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 32-bit MOV_K must zero-extend into the 64-bit register:
		 * the stale upper bits of R2 are cleared, so R2 compares
		 * equal to 0x00000000ffffffff, not 0x0000ffffffff0000.
		 */
		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
	{
		/* 64-bit immediate move of a small value. */
		"ALU64_MOV_K: dst = 2",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2 } },
	},
	{
		/* 64-bit immediate move of the maximum positive s32. */
		"ALU64_MOV_K: dst = 2147483647",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2147483647 } },
	},
| { |
| "ALU64_OR_K: dst = 0x0", |
| .u.insns_int = { |
| BPF_LD_IMM64(R2, 0x0000ffffffff0000LL), |
| BPF_LD_IMM64(R3, 0x0), |
| BPF_ALU64_IMM(BPF_MOV, R2, 0x0), |
| BPF_JMP_REG(BPF_JEQ, R2, R3, 2), |
| BPF_MOV32_IMM(R0, 2), |
| BPF_EXIT_INSN(), |
| BPF_MOV32_IMM(R0, 1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 0x1 } }, |
| }, |
| { |
| "ALU64_MOV_K: dst = -1", |
| .u.insns_int = { |
| BPF_LD_IMM64(R2, 0x0000ffffffff0000LL), |
| BPF_LD_IMM64(R3, 0xffffffffffffffffLL), |
| BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff), |
| BPF_JMP_REG(BPF_JEQ, R2, R3, 2), |
| BPF_MOV32_IMM(R0, 2), |
| BPF_EXIT_INSN(), |
| BPF_MOV32_IMM(R0, 1), |
| BPF_EXIT_INSN(), |
| }, |
| INTERNAL, |
| { }, |
| { { 0, 0x1 } }, |
| }, |
	/* BPF_ALU | BPF_ADD | BPF_X */
	{
		/* Basic 32-bit register add. */
		"ALU_ADD_X: 1 + 2 = 3",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU32_REG(BPF_ADD, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 3 } },
	},
	{
		/* 32-bit register add reaching the u32 maximum. */
		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
			BPF_ALU32_REG(BPF_ADD, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 32-bit register add must wrap around to 0, with no
		 * carry into the upper half of the 64-bit register.
		 */
		"ALU_ADD_X: 2 + 4294967294 = 0",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 2),
			BPF_LD_IMM64(R1, 4294967294U),
			BPF_ALU32_REG(BPF_ADD, R0, R1),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		/* Basic 64-bit register add. */
		"ALU64_ADD_X: 1 + 2 = 3",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_MOV, R1, 2),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 3 } },
	},
	{
		/* 64-bit register add reaching the u32 maximum. */
		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 64-bit register add must NOT wrap at 32 bits: the
		 * carry propagates into bit 32 (result 0x100000000).
		 */
		"ALU64_ADD_X: 2 + 4294967294 = 4294967296",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 2),
			BPF_LD_IMM64(R1, 4294967294U),
			BPF_LD_IMM64(R2, 4294967296ULL),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
			BPF_MOV32_IMM(R0, 0),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_ALU | BPF_ADD | BPF_K */
	{
		/* Basic 32-bit immediate add. */
		"ALU_ADD_K: 1 + 2 = 3",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 3 } },
	},
	{
		/* Adding immediate 0 is a no-op on the value; JITs that
		 * special-case K == 0 must still leave dst intact.
		 */
		"ALU_ADD_K: 3 + 0 = 3",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 3),
			BPF_ALU32_IMM(BPF_ADD, R0, 0),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 3 } },
	},
	{
		/* 32-bit immediate add reaching the u32 maximum. */
		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 1),
			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 4294967295U } },
	},
	{
		/* 32-bit immediate add must wrap around to 0. */
		"ALU_ADD_K: 4294967294 + 2 = 0",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 4294967294U),
			BPF_ALU32_IMM(BPF_ADD, R0, 2),
			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		/* 32-bit add of immediate 0xffffffff: the 32-bit result
		 * 0xffffffff must be zero-extended into the 64-bit
		 * register (no sign extension of the immediate into
		 * the upper half).
		 */
		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0),
			BPF_LD_IMM64(R3, 0x00000000ffffffff),
			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
	{
		/* Boundary immediates for JIT encodings: 0xffff ... */
		"ALU_ADD_K: 0 + 0xffff = 0xffff",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0),
			BPF_LD_IMM64(R3, 0xffff),
			BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
	{
		/* ... 0x7fffffff (largest positive s32 immediate) ... */
		"ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0),
			BPF_LD_IMM64(R3, 0x7fffffff),
			BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
	{
		/* ... and 0x80000000 (sign bit set): the 32-bit result
		 * must not be sign-extended past bit 31.
		 */
		"ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
		.u.insns_int = {
			BPF_LD_IMM64(R2, 0x0),
			BPF_LD_IMM64(R3, 0x80000000),
			BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
			BPF_MOV32_IMM(R0, 2),
			BPF_EXIT_INSN(),
			BPF_MOV32_IMM(R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x1 } },
	},
| { |
| "ALU_ADD_K: 0 + 0x80008000 = 0x80008000", |
| .u.insns_int = { |
| |