/* This file contains sub-register zero extension checks for insns defining
 * sub-registers, meaning:
 *   - All insns under the BPF_ALU class. Their BPF_ALU32 variants or narrow
 *     width forms (BPF_END) could define sub-registers.
 *   - Narrow direct loads, BPF_B/H/W | BPF_LDX.
 *   - BPF_LD is not exposed to JIT back-ends, so there is no need to test it.
 *
 * "get_prandom_u32" is used to initialize the low 32 bits of some registers so
 * that neither the verifier nor JIT back-ends can optimize a register back
 * into a constant when range info shows it holds a constant value.
 */
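/* Each test below follows the same pattern; a sketch of the "add32 imm"
 * flavour in BPF assembler syntax (illustrative only, not a test entry):
 *
 *	call bpf_get_prandom_u32	randomize the low 32 bits of r0
 *	r1 = 0x1000000000 ll
 *	r0 |= r1			pollute bits above bit 31
 *	w0 += 0				insn under test defines sub-register w0
 *	r0 >>= 32			reads 0 only if w0 was zero extended
 *	exit				expected retval is 0
 */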
{
	"add32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"add32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	/* An insn could have no effect on the low 32 bits, for example:
	 *   a = a + 0
	 *   a = a | 0
	 *   a = a & -1
	 * But it should still zero the high 32 bits.
	 */
	BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, -2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"sub32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x1ffffffffULL),
	BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"sub32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mul32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000001ULL),
	BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mul32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, -1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"div32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_MOV64_IMM(BPF_REG_0, -1),
	BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"div32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"or32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000001ULL),
	BPF_ALU32_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"or32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"and32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x1ffffffffULL),
	BPF_ALU32_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"and32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_AND, BPF_REG_0, -1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_AND, BPF_REG_0, -2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"lsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"lsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"rsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"rsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"neg32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_NEG, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mod32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_MOV64_IMM(BPF_REG_0, -1),
	BPF_ALU32_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mod32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 2),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"xor32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_ALU32_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"xor32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_XOR, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mov32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x100000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_1, BPF_REG_0),
	BPF_LD_IMM64(BPF_REG_0, 0x100000000ULL),
	BPF_MOV32_REG(BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"mov32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV32_IMM(BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV32_IMM(BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"arsh32 reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_MOV64_IMM(BPF_REG_1, 1),
	BPF_ALU32_REG(BPF_ARSH, BPF_REG_0, BPF_REG_1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"arsh32 imm zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ARSH, BPF_REG_0, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_ALU32_IMM(BPF_ARSH, BPF_REG_0, 1),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end16 (to_le) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_LE, BPF_REG_0, 16),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end32 (to_le) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_LE, BPF_REG_0, 32),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end16 (to_be) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_BE, BPF_REG_0, 16),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"end32 (to_be) reg zero extend check",
	.insns = {
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
	BPF_ALU64_IMM(BPF_LSH, BPF_REG_6, 32),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_6),
	BPF_ENDIAN(BPF_TO_BE, BPF_REG_0, 32),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"ldx_b zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"ldx_h zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_H, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},
{
	"ldx_w zero extend check",
	.insns = {
	BPF_MOV64_REG(BPF_REG_6, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, -4),
	BPF_ST_MEM(BPF_W, BPF_REG_6, 0, 0xfaceb00c),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_LD_IMM64(BPF_REG_1, 0x1000000000ULL),
	BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
	BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_6, 0),
	BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.retval = 0,
},