From mboxrd@z Thu Jan 1 00:00:00 1970 From: =?UTF-8?q?Bj=C3=B6rn=20T=C3=B6pel?= Subject: [RFC PATCH 3/3] bpf, riscv: added eBPF JIT for RV64G Date: Tue, 15 Jan 2019 09:35:18 +0100 Message-ID: <20190115083518.10149-4-bjorn.topel@gmail.com> References: <20190115083518.10149-1-bjorn.topel@gmail.com> Mime-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Cc: =?UTF-8?q?Bj=C3=B6rn=20T=C3=B6pel?= , daniel@iogearbox.net, palmer@sifive.com, davidlee@sifive.com, netdev@vger.kernel.org To: linux-riscv@lists.infradead.org Return-path: Received: from mail-lj1-f193.google.com ([209.85.208.193]:38425 "EHLO mail-lj1-f193.google.com" rhost-flags-OK-OK-OK-OK) by vger.kernel.org with ESMTP id S1725869AbfAOIgG (ORCPT ); Tue, 15 Jan 2019 03:36:06 -0500 Received: by mail-lj1-f193.google.com with SMTP id c19-v6so1576409lja.5 for ; Tue, 15 Jan 2019 00:36:01 -0800 (PST) In-Reply-To: <20190115083518.10149-1-bjorn.topel@gmail.com> Sender: netdev-owner@vger.kernel.org List-ID: This commit adds eBPF JIT for RV64G. Codewise, it needs some refactoring. Currently there's a bit too much copy-and-paste going on, and I know some places where I could optimize the code generation a bit (mostly BPF_K type of instructions, dealing with immediates). >>From a features perspective, two things are missing: * tail calls * "far-branches", i.e. conditional branches that reach beyond 13b. The test_bpf.ko passes all tests. Signed-off-by: Björn Töpel --- arch/riscv/net/bpf_jit_comp.c | 1608 +++++++++++++++++++++++++++++++++ 1 file changed, 1608 insertions(+) diff --git a/arch/riscv/net/bpf_jit_comp.c b/arch/riscv/net/bpf_jit_comp.c index 7e359d3249ee..562d56eb8d23 100644 --- a/arch/riscv/net/bpf_jit_comp.c +++ b/arch/riscv/net/bpf_jit_comp.c @@ -1,4 +1,1612 @@ +// SPDX-License-Identifier: GPL-2.0 +/* + * BPF JIT compiler for RV64G + * + * Copyright(c) 2019 Björn Töpel + * + */ + +#include +#include +#include + +#define TMP_REG_0 (MAX_BPF_JIT_REG + 0) +#define TMP_REG_1 (MAX_BPF_JIT_REG + 1) +#define TAIL_CALL_REG (MAX_BPF_JIT_REG + 2) + +enum rv_register { + RV_REG_ZERO = 0, /* The constant value 0 */ + RV_REG_RA = 1, /* Return address */ + RV_REG_SP = 2, /* Stack pointer */ + RV_REG_GP = 3, /* Global pointer */ + RV_REG_TP = 4, /* Thread pointer */ + RV_REG_T0 = 5, /* Temporaries */ + RV_REG_T1 = 6, + RV_REG_T2 = 7, + RV_REG_FP = 8, + RV_REG_S1 = 9, /* Saved registers */ + RV_REG_A0 = 10, /* Function argument/return values */ + RV_REG_A1 = 11, /* Function arguments */ + RV_REG_A2 = 12, + RV_REG_A3 = 13, + RV_REG_A4 = 14, + RV_REG_A5 = 15, + RV_REG_A6 = 16, + RV_REG_A7 = 17, + RV_REG_S2 = 18, /* Saved registers */ + RV_REG_S3 = 19, + RV_REG_S4 = 20, + RV_REG_S5 = 21, + RV_REG_S6 = 22, + RV_REG_S7 = 23, + RV_REG_S8 = 24, + RV_REG_S9 = 25, + RV_REG_S10 = 26, + RV_REG_S11 = 27, + RV_REG_T3 = 28, /* Temporaries */ + RV_REG_T4 = 29, + RV_REG_T5 = 30, + RV_REG_T6 = 31, +}; + +struct rv_jit_context { + struct bpf_prog *prog; + u32 *insns; /* RV insns */ + int ninsns; + int epilogue_offset; + int *offset; /* BPF to RV */ + unsigned long seen_reg_bits; + int stack_size; +}; + +struct rv_jit_data { + struct bpf_binary_header *header; + u8 *image; + struct rv_jit_context ctx; +}; + +static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx) +{ + switch (bpf_reg) { + /* Return value */ + case BPF_REG_0: + __set_bit(RV_REG_A5, &ctx->seen_reg_bits); + return RV_REG_A5; + /* Function arguments */ + case BPF_REG_1: + __set_bit(RV_REG_A0, &ctx->seen_reg_bits); + return RV_REG_A0; + case BPF_REG_2: + 
__set_bit(RV_REG_A1, &ctx->seen_reg_bits); + return RV_REG_A1; + case BPF_REG_3: + __set_bit(RV_REG_A2, &ctx->seen_reg_bits); + return RV_REG_A2; + case BPF_REG_4: + __set_bit(RV_REG_A3, &ctx->seen_reg_bits); + return RV_REG_A3; + case BPF_REG_5: + __set_bit(RV_REG_A4, &ctx->seen_reg_bits); + return RV_REG_A4; + /* Callee saved registers */ + case BPF_REG_6: + __set_bit(RV_REG_S1, &ctx->seen_reg_bits); + return RV_REG_S1; + case BPF_REG_7: + __set_bit(RV_REG_S2, &ctx->seen_reg_bits); + return RV_REG_S2; + case BPF_REG_8: + __set_bit(RV_REG_S3, &ctx->seen_reg_bits); + return RV_REG_S3; + case BPF_REG_9: + __set_bit(RV_REG_S4, &ctx->seen_reg_bits); + return RV_REG_S4; + /* Stack read-only frame pointer to access stack */ + case BPF_REG_FP: + __set_bit(RV_REG_S5, &ctx->seen_reg_bits); + return RV_REG_S5; + /* Temporary register */ + case BPF_REG_AX: + __set_bit(RV_REG_T0, &ctx->seen_reg_bits); + return RV_REG_T0; + /* Tail call counter */ + case TAIL_CALL_REG: + __set_bit(RV_REG_S6, &ctx->seen_reg_bits); + return RV_REG_S6; + default: + return 0; + } +}; + +static void seen_call(struct rv_jit_context *ctx) +{ + __set_bit(RV_REG_RA, &ctx->seen_reg_bits); +} + +static bool seen_reg(int rv_reg, struct rv_jit_context *ctx) +{ + return test_bit(rv_reg, &ctx->seen_reg_bits); +} + +static void emit(const u32 insn, struct rv_jit_context *ctx) +{ + if (ctx->insns) + ctx->insns[ctx->ninsns] = insn; + + ctx->ninsns++; +} + +static u32 rv_r_insn(u8 funct7, u8 rs2, u8 rs1, u8 funct3, u8 rd, u8 opcode) +{ + return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) | + (rd << 7) | opcode; +} + +static u32 rv_i_insn(u16 imm11_0, u8 rs1, u8 funct3, u8 rd, u8 opcode) +{ + return (imm11_0 << 20) | (rs1 << 15) | (funct3 << 12) | (rd << 7) | + opcode; +} + +static u32 rv_s_insn(u16 imm11_0, u8 rs2, u8 rs1, u8 funct3, u8 opcode) +{ + u8 imm11_5 = imm11_0 >> 5, imm4_0 = imm11_0 & 0x1f; + + return (imm11_5 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) | + (imm4_0 << 7) | opcode; +} + +static u32 rv_sb_insn(u16 imm12_1, u8 rs2, u8 rs1, u8 funct3, u8 opcode) +{ + u8 imm12 = ((imm12_1 & 0x800) >> 5) | ((imm12_1 & 0x3f0) >> 4); + u8 imm4_1 = ((imm12_1 & 0xf) << 1) | ((imm12_1 & 0x400) >> 10); + + return (imm12 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) | + (imm4_1 << 7) | opcode; +} + +static u32 rv_u_insn(u32 imm31_12, u8 rd, u8 opcode) +{ + return (imm31_12 << 12) | (rd << 7) | opcode; +} + +static u32 rv_uj_insn(u32 imm20_1, u8 rd, u8 opcode) +{ + u32 imm; + + imm = (imm20_1 & 0x80000) | ((imm20_1 & 0x3ff) << 9) | + ((imm20_1 & 0x400) >> 2) | ((imm20_1 & 0x7f800) >> 11); + + return (imm << 12) | (rd << 7) | opcode; +} + +static u32 rv_amo_insn(u8 funct5, u8 aq, u8 rl, u8 rs2, u8 rs1, + u8 funct3, u8 rd, u8 opcode) +{ + u8 funct7 = (funct5 << 2) | (aq << 1) | rl; + + return rv_r_insn(funct7, rs2, rs1, funct3, rd, opcode); +} + +static u32 rv_addiw(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 0, rd, 0x1b); +} + +static u32 rv_addi(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 0, rd, 0x13); +} + +static u32 rv_addw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 0, rd, 0x3b); +} + +static u32 rv_add(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 0, rd, 0x33); +} + +static u32 rv_subw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x3b); +} + +static u32 rv_sub(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x33); +} + +static u32 rv_and(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 7, 
rd, 0x33); +} + +static u32 rv_or(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 6, rd, 0x33); +} + +static u32 rv_xor(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 4, rd, 0x33); +} + +static u32 rv_mulw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 0, rd, 0x3b); +} + +static u32 rv_mul(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 0, rd, 0x33); +} + +static u32 rv_divuw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 5, rd, 0x3b); +} + +static u32 rv_divu(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 5, rd, 0x33); +} + +static u32 rv_remuw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 7, rd, 0x3b); +} + +static u32 rv_remu(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(1, rs2, rs1, 7, rd, 0x33); +} + +static u32 rv_sllw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 1, rd, 0x3b); +} + +static u32 rv_sll(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 1, rd, 0x33); +} + +static u32 rv_srlw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 5, rd, 0x3b); +} + +static u32 rv_srl(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0, rs2, rs1, 5, rd, 0x33); +} + +static u32 rv_sraw(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x3b); +} + +static u32 rv_sra(u8 rd, u8 rs1, u8 rs2) +{ + return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x33); +} + +static u32 rv_lui(u8 rd, u32 imm31_12) +{ + return rv_u_insn(imm31_12, rd, 0x37); +} + +static u32 rv_slli(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 1, rd, 0x13); +} + +static u32 rv_andi(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 7, rd, 0x13); +} + +static u32 rv_ori(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 6, rd, 0x13); +} + +static u32 rv_xori(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 4, rd, 0x13); +} + +static u32 rv_slliw(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 1, rd, 0x1b); +} + +static u32 rv_srliw(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 5, rd, 0x1b); +} + +static u32 rv_srli(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 5, rd, 0x13); +} + +static u32 rv_sraiw(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x1b); +} + +static u32 rv_srai(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x13); +} + +#if 0 +static u32 rv_auipc(u8 rd, u32 imm31_12) +{ + return rv_u_insn(imm31_12, rd, 0x17); +} +#endif + +static u32 rv_jal(u8 rd, u32 imm20_1) +{ + return rv_uj_insn(imm20_1, rd, 0x6f); +} + +static u32 rv_jalr(u8 rd, u8 rs1, u16 imm11_0) +{ + return rv_i_insn(imm11_0, rs1, 0, rd, 0x67); +} + +static u32 rv_beq(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 0, 0x63); +} + +static u32 rv_bltu(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 6, 0x63); +} + +static u32 rv_bgeu(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 7, 0x63); +} + +static u32 rv_bne(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 1, 0x63); +} + +static u32 rv_blt(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 4, 0x63); +} + +static u32 rv_bge(u8 rs1, u8 rs2, u16 imm12_1) +{ + return rv_sb_insn(imm12_1, rs2, rs1, 5, 0x63); +} + +static u32 rv_sb(u8 rs1, u16 imm11_0, u8 rs2) +{ + return rv_s_insn(imm11_0, rs2, rs1, 0, 0x23); +} + +static u32 rv_sh(u8 rs1, u16 imm11_0, u8 rs2) +{ + return rv_s_insn(imm11_0, rs2, rs1, 1, 0x23); +} + +static u32 
rv_sw(u8 rs1, u16 imm11_0, u8 rs2) +{ + return rv_s_insn(imm11_0, rs2, rs1, 2, 0x23); +} + +static u32 rv_sd(u8 rs1, u16 imm11_0, u8 rs2) +{ + return rv_s_insn(imm11_0, rs2, rs1, 3, 0x23); +} + +#if 0 +static u32 rv_lb(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 0, rd, 0x03); +} +#endif + +static u32 rv_lbu(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 4, rd, 0x03); +} + +#if 0 +static u32 rv_lh(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 1, rd, 0x03); +} +#endif + +static u32 rv_lhu(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 5, rd, 0x03); +} + +#if 0 +static u32 rv_lw(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 2, rd, 0x03); +} +#endif + +static u32 rv_lwu(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 6, rd, 0x03); +} + +static u32 rv_ld(u8 rd, u16 imm11_0, u8 rs1) +{ + return rv_i_insn(imm11_0, rs1, 3, rd, 0x03); +} + +static u32 rv_amoadd_w(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl) +{ + return rv_amo_insn(0, aq, rl, rs2, rs1, 2, rd, 0x2f); +} + +static u32 rv_amoadd_d(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl) +{ + return rv_amo_insn(0, aq, rl, rs2, rs1, 3, rd, 0x2f); +} + +static bool is_12b_int(s64 val) +{ + return -(1 << 11) <= val && val < (1 << 11); +} + +static bool is_32b_int(s64 val) +{ + return -(1L << 31) <= val && val < (1L << 31); +} + +/* jumps */ +static bool is_21b_int(s64 val) +{ + return -(1L << 20) <= val && val < (1L << 20); + +} + +/* conditional branches */ +static bool is_13b_int(s64 val) +{ + return -(1 << 12) <= val && val < (1 << 12); +} + +static void emit_imm(u8 rd, s64 val, struct rv_jit_context *ctx) +{ + /* Note that the immediate from the add is sign-extended, + * which means that we need to compensate this by adding 2^12, + * when the 12th bit is set. A simpler way of doing this, and + * getting rid of the check, is to just add 2**11 before the + * shift. The "Loading a 32-Bit constant" example from the + * "Computer Organization and Design, RISC-V edition" book by + * Patterson/Hennessy highlights this fact. + * + * This also means that we need to process LSB to MSB. + */ + s64 upper = (val + (1 << 11)) >> 12, lower = val & 0xfff; + int shift; + + if (is_32b_int(val)) { + if (upper) + emit(rv_lui(rd, upper), ctx); + + if (!upper) { + emit(rv_addi(rd, RV_REG_ZERO, lower), ctx); + return; + } + + emit(rv_addiw(rd, rd, lower), ctx); + return; + } + + shift = __ffs(upper); + upper >>= shift; + shift += 12; + + emit_imm(rd, upper, ctx); + + emit(rv_slli(rd, rd, shift), ctx); + if (lower) + emit(rv_addi(rd, rd, lower), ctx); +} + +static int rv_offset(int bpf_to, int bpf_from, struct rv_jit_context *ctx) +{ + int from = ctx->offset[bpf_from] - 1, to = ctx->offset[bpf_to]; + + return (to - from) << 2; +} + +static int epilogue_offset(struct rv_jit_context *ctx) +{ + int to = ctx->epilogue_offset, from = ctx->ninsns; + + return (to - from) << 2; +} + +static int emit_insn(const struct bpf_insn *insn, struct rv_jit_context *ctx, + bool extra_pass) +{ + bool is64 = BPF_CLASS(insn->code) == BPF_ALU64; + int rvoff, i = insn - ctx->prog->insnsi; + u8 rd, rs, code = insn->code; + s16 off = insn->off; + s32 imm = insn->imm; + + switch (code) { + /* dst = src */ + case BPF_ALU | BPF_MOV | BPF_X: + case BPF_ALU64 | BPF_MOV | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? 
rv_addi(rd, rs, 0) : rv_addiw(rd, rs, 0), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + + /* dst = dst OP src */ + case BPF_ALU | BPF_ADD | BPF_X: + case BPF_ALU64 | BPF_ADD | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_add(rd, rd, rs) : rv_addw(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_SUB | BPF_X: + case BPF_ALU64 | BPF_SUB | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_sub(rd, rd, rs) : rv_subw(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_AND | BPF_X: + case BPF_ALU64 | BPF_AND | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_and(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_OR | BPF_X: + case BPF_ALU64 | BPF_OR | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_or(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_XOR | BPF_X: + case BPF_ALU64 | BPF_XOR | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_xor(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_MUL | BPF_X: + case BPF_ALU64 | BPF_MUL | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_DIV | BPF_X: + case BPF_ALU64 | BPF_DIV | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_MOD | BPF_X: + case BPF_ALU64 | BPF_MOD | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_LSH | BPF_X: + case BPF_ALU64 | BPF_LSH | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_RSH | BPF_X: + case BPF_ALU64 | BPF_RSH | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx); + break; + case BPF_ALU | BPF_ARSH | BPF_X: + case BPF_ALU64 | BPF_ARSH | BPF_X: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx); + break; + + /* dst = -dst */ + case BPF_ALU | BPF_NEG: + case BPF_ALU64 | BPF_NEG: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? 
+ rv_sub(rd, RV_REG_ZERO, rd) : + rv_subw(rd, RV_REG_ZERO, rd), + ctx); + break; + + /* dst = BSWAP##imm(dst) */ + case BPF_ALU | BPF_END | BPF_FROM_LE: + { + int shift = 64 - imm; + + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_slli(rd, rd, shift), ctx); + emit(rv_srli(rd, rd, shift), ctx); + break; + } + case BPF_ALU | BPF_END | BPF_FROM_BE: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + + emit(rv_addi(RV_REG_T2, RV_REG_ZERO, 0), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + if (imm == 16) + goto out_be; + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + if (imm == 32) + goto out_be; + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + out_be: + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + + emit(rv_addi(rd, RV_REG_T2, 0), ctx); + break; + + /* dst = imm */ + case BPF_ALU | BPF_MOV | BPF_K: + case BPF_ALU64 | BPF_MOV | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(rd, imm, ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + + /* dst = dst OP imm */ + case BPF_ALU | BPF_ADD | BPF_K: + case BPF_ALU64 | BPF_ADD | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(is64 ? rv_addi(rd, rd, imm) : + rv_addiw(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_add(rd, rd, RV_REG_T1) : + rv_addw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_SUB | BPF_K: + case BPF_ALU64 | BPF_SUB | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(-imm)) { + emit(is64 ? rv_addi(rd, rd, -imm) : + rv_addiw(rd, rd, -imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? 
rv_sub(rd, rd, RV_REG_T1) : + rv_subw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_AND | BPF_K: + case BPF_ALU64 | BPF_AND | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_andi(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_and(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_OR | BPF_K: + case BPF_ALU64 | BPF_OR | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_ori(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_or(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_XOR | BPF_K: + case BPF_ALU64 | BPF_XOR | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_xori(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_xor(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_MUL | BPF_K: + case BPF_ALU64 | BPF_MUL | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_mul(rd, rd, RV_REG_T1) : + rv_mulw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_DIV | BPF_K: + case BPF_ALU64 | BPF_DIV | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_divu(rd, rd, RV_REG_T1) : + rv_divuw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_MOD | BPF_K: + case BPF_ALU64 | BPF_MOD | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_remu(rd, rd, RV_REG_T1) : + rv_remuw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_LSH | BPF_K: + case BPF_ALU64 | BPF_LSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_slli(rd, rd, imm) : + rv_slliw(rd, rd, imm), ctx); + break; + case BPF_ALU | BPF_RSH | BPF_K: + case BPF_ALU64 | BPF_RSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_srli(rd, rd, imm) : + rv_srliw(rd, rd, imm), ctx); + break; + case BPF_ALU | BPF_ARSH | BPF_K: + case BPF_ALU64 | BPF_ARSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? 
rv_srai(rd, rd, imm) : + rv_sraiw(rd, rd, imm), ctx); + break; + + /* JUMP off */ + case BPF_JMP | BPF_JA: + rvoff = rv_offset(i + off, i, ctx); + if (!is_21b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, rvoff); + return -1; + } + + emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* IF (dst COND src) JUMP off */ + case BPF_JMP | BPF_JEQ | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_beq(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bltu(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bltu(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bgeu(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bgeu(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JNE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bne(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_blt(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_blt(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bge(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = 
bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bge(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSET | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_and(RV_REG_T1, rd, rs), ctx); + emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* IF (dst COND imm) JUMP off */ + case BPF_JMP | BPF_JEQ | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_beq(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bltu(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bltu(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bgeu(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bgeu(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JNE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bne(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_blt(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_blt(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bge(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLE | BPF_K: 
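+ /* dst <= imm (signed): imm is loaded into t1 below and emitted as bge t1, dst, i.e. with the operands swapped */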
+ rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bge(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSET | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T2, imm, ctx); + emit(rv_and(RV_REG_T1, rd, RV_REG_T2), ctx); + emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* function call */ + case BPF_JMP | BPF_CALL: + { + bool fixed; + int i, ret; + u64 addr; + + seen_call(ctx); + ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr, + &fixed); + if (ret < 0) + return ret; + if (fixed) { + emit_imm(RV_REG_T1, addr, ctx); + } else { + i = ctx->ninsns; + emit_imm(RV_REG_T1, addr, ctx); + for (i = ctx->ninsns - i; i < 8; i++) { + /* nop */ + emit(rv_addi(RV_REG_ZERO, RV_REG_ZERO, 0), + ctx); + } + } + emit(rv_jalr(RV_REG_RA, RV_REG_T1, 0), ctx); + rd = bpf_to_rv_reg(BPF_REG_0, ctx); + emit(rv_addi(rd, RV_REG_A0, 0), ctx); + break; + } + /* tail call */ + case BPF_JMP | BPF_TAIL_CALL: + rd = bpf_to_rv_reg(TAIL_CALL_REG, ctx); + pr_err("bpf-jit: tail call not supported yet!\n"); + return -1; + + /* function return */ + case BPF_JMP | BPF_EXIT: + if (i == ctx->prog->len - 1) + break; + + rvoff = epilogue_offset(ctx); + if (!is_21b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, rvoff); + return -1; + } + + emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* dst = imm64 */ + case BPF_LD | BPF_IMM | BPF_DW: + { + struct bpf_insn insn1 = insn[1]; + u64 imm64; + + imm64 = (u64)insn1.imm << 32 | (u32)imm; + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(rd, imm64, ctx); + return 1; + } + + /* LDX: dst = *(size *)(src + off) */ + case BPF_LDX | BPF_MEM | BPF_B: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lbu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lbu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_H: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lhu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lhu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_W: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lwu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lwu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_ld(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_ld(rd, 0, RV_REG_T1), ctx); + break; + + /* ST: *(size *)(dst + off) = imm */ + case BPF_ST | BPF_MEM | BPF_B: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sb(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + 
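/* off does not fit in 12 bits: form dst + off in t2 and store the immediate (held in t1) through t2 */ +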
emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + + case BPF_ST | BPF_MEM | BPF_H: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sh(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + case BPF_ST | BPF_MEM | BPF_W: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sw(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sw(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + case BPF_ST | BPF_MEM | BPF_DW: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sd(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sd(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + + /* STX: *(size *)(dst + off) = src */ + case BPF_STX | BPF_MEM | BPF_B: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sb(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sb(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_H: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sh(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sh(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_W: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sw(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sw(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sd(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sd(RV_REG_T1, 0, rs), ctx); + break; + /* STX XADD: lock *(u32 *)(dst + off) += src */ + case BPF_STX | BPF_XADD | BPF_W: + /* STX XADD: lock *(u64 *)(dst + off) += src */ + case BPF_STX | BPF_XADD | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (off) { + if (is_12b_int(off)) { + emit(rv_addi(RV_REG_T1, rd, off), ctx); + } else { + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + } + + rd = RV_REG_T1; + } + + emit(BPF_SIZE(code) == BPF_W ? 
+ rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0) : + rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0), ctx); + break; + default: + pr_err("bpf-jit: unknown opcode %02x\n", code); + return -EINVAL; + } + + return 0; +} + +static void build_prologue(struct rv_jit_context *ctx) +{ + int stack_adjust = 0, store_offset, bpf_stack_adjust; + + if (seen_reg(RV_REG_RA, ctx)) + stack_adjust += 8; + stack_adjust += 8; /* RV_REG_FP */ + if (seen_reg(RV_REG_S1, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S2, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S3, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S4, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S5, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S6, ctx)) + stack_adjust += 8; + + stack_adjust = round_up(stack_adjust, 16); + bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16); + stack_adjust += bpf_stack_adjust; + + store_offset = stack_adjust - 8; + + emit(rv_addi(RV_REG_SP, RV_REG_SP, -stack_adjust), ctx); + + if (seen_reg(RV_REG_RA, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_RA), ctx); + store_offset -= 8; + } + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_FP), ctx); + store_offset -= 8; + if (seen_reg(RV_REG_S1, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S1), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S2, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S2), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S3, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S3), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S4, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S4), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S5, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S5), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S6, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S6), ctx); + store_offset -= 8; + } + + emit(rv_addi(RV_REG_FP, RV_REG_SP, stack_adjust), ctx); + + if (bpf_stack_adjust) { + if (!seen_reg(RV_REG_S5, ctx)) + pr_warn("bpf-jit: not seen BPF_REG_FP, stack is %d\n", + bpf_stack_adjust); + emit(rv_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust), ctx); + } + + ctx->stack_size = stack_adjust; +} + +static void build_epilogue(struct rv_jit_context *ctx) +{ + int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8; + + if (seen_reg(RV_REG_RA, ctx)) { + emit(rv_ld(RV_REG_RA, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + emit(rv_ld(RV_REG_FP, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + if (seen_reg(RV_REG_S1, ctx)) { + emit(rv_ld(RV_REG_S1, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S2, ctx)) { + emit(rv_ld(RV_REG_S2, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S3, ctx)) { + emit(rv_ld(RV_REG_S3, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S4, ctx)) { + emit(rv_ld(RV_REG_S4, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S5, ctx)) { + emit(rv_ld(RV_REG_S5, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S6, ctx)) { + emit(rv_ld(RV_REG_S6, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + + emit(rv_addi(RV_REG_SP, RV_REG_SP, stack_adjust), ctx); + /* Set return value. 
*/ + emit(rv_addi(RV_REG_A0, RV_REG_A5, 0), ctx); + emit(rv_jalr(RV_REG_ZERO, RV_REG_RA, 0), ctx); +} + +static int build_body(struct rv_jit_context *ctx, bool extra_pass) +{ + const struct bpf_prog *prog = ctx->prog; + int i; + + for (i = 0; i < prog->len; i++) { + const struct bpf_insn *insn = &prog->insnsi[i]; + int ret; + + ret = emit_insn(insn, ctx, extra_pass); + if (ret > 0) { + i++; + if (ctx->insns == NULL) + ctx->offset[i] = ctx->ninsns; + continue; + } + if (ctx->insns == NULL) + ctx->offset[i] = ctx->ninsns; + if (ret) + return ret; + } + return 0; +} + +static void bpf_fill_ill_insns(void *area, unsigned int size) +{ + memset(area, 0, size); +} + +static void bpf_flush_icache(void *start, void *end) +{ + flush_icache_range((unsigned long)start, (unsigned long)end); +} + struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog) { + bool tmp_blinded = false, extra_pass = false; + struct bpf_prog *tmp, *orig_prog = prog; + struct rv_jit_data *jit_data; + struct rv_jit_context *ctx; + unsigned int image_size; + + if (!prog->jit_requested) + return orig_prog; + + tmp = bpf_jit_blind_constants(prog); + if (IS_ERR(tmp)) + return orig_prog; + if (tmp != prog) { + tmp_blinded = true; + prog = tmp; + } + + jit_data = prog->aux->jit_data; + if (!jit_data) { + jit_data = kzalloc(sizeof(*jit_data), GFP_KERNEL); + if (!jit_data) { + prog = orig_prog; + goto out; + } + prog->aux->jit_data = jit_data; + } + + ctx = &jit_data->ctx; + + if (ctx->offset) { + extra_pass = true; + image_size = sizeof(u32) * ctx->ninsns; + goto skip_init_ctx; + } + + ctx->prog = prog; + ctx->offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL); + if (!ctx->offset) { + prog = orig_prog; + goto out_offset; + } + + /* First pass generates the ctx->offset, but does not emit an image. */ + if (build_body(ctx, extra_pass)) { + prog = orig_prog; + goto out_offset; + } + build_prologue(ctx); + ctx->epilogue_offset = ctx->ninsns; + build_epilogue(ctx); + + /* Allocate image, now that we know the size. */ + image_size = sizeof(u32) * ctx->ninsns; + jit_data->header = bpf_jit_binary_alloc(image_size, &jit_data->image, + sizeof(u32), + bpf_fill_ill_insns); + if (!jit_data->header) { + prog = orig_prog; + goto out_offset; + } + + /* Second, real pass, which actually emits the image. */ + ctx->insns = (u32 *)jit_data->image; +skip_init_ctx: + ctx->ninsns = 0; + + build_prologue(ctx); + if (build_body(ctx, extra_pass)) { + bpf_jit_binary_free(jit_data->header); + prog = orig_prog; + goto out_offset; + } + build_epilogue(ctx); + + if (bpf_jit_enable > 1) + bpf_jit_dump(prog->len, image_size, 2, ctx->insns); + + prog->bpf_func = (void *)ctx->insns; + prog->jited = 1; + prog->jited_len = image_size; + + bpf_flush_icache(jit_data->header, ctx->insns + ctx->ninsns); + + if (!prog->is_func || extra_pass) { +out_offset: + kfree(ctx->offset); + kfree(jit_data); + prog->aux->jit_data = NULL; + } +out: + if (tmp_blinded) + bpf_jit_prog_release_other(prog, prog == orig_prog ? 
+ tmp : orig_prog); return prog; } -- 2.19.1
+ rv_sub(rd, RV_REG_ZERO, rd) : + rv_subw(rd, RV_REG_ZERO, rd), + ctx); + break; + + /* dst = BSWAP##imm(dst) */ + case BPF_ALU | BPF_END | BPF_FROM_LE: + { + int shift = 64 - imm; + + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_slli(rd, rd, shift), ctx); + emit(rv_srli(rd, rd, shift), ctx); + break; + } + case BPF_ALU | BPF_END | BPF_FROM_BE: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + + emit(rv_addi(RV_REG_T2, RV_REG_ZERO, 0), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + if (imm == 16) + goto out_be; + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + if (imm == 32) + goto out_be; + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx); + emit(rv_srli(rd, rd, 8), ctx); + out_be: + emit(rv_andi(RV_REG_T1, rd, 0xff), ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx); + + emit(rv_addi(rd, RV_REG_T2, 0), ctx); + break; + + /* dst = imm */ + case BPF_ALU | BPF_MOV | BPF_K: + case BPF_ALU64 | BPF_MOV | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(rd, imm, ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + + /* dst = dst OP imm */ + case BPF_ALU | BPF_ADD | BPF_K: + case BPF_ALU64 | BPF_ADD | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(is64 ? rv_addi(rd, rd, imm) : + rv_addiw(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_add(rd, rd, RV_REG_T1) : + rv_addw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_SUB | BPF_K: + case BPF_ALU64 | BPF_SUB | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(-imm)) { + emit(is64 ? rv_addi(rd, rd, -imm) : + rv_addiw(rd, rd, -imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? 
rv_sub(rd, rd, RV_REG_T1) : + rv_subw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_AND | BPF_K: + case BPF_ALU64 | BPF_AND | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_andi(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_and(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_OR | BPF_K: + case BPF_ALU64 | BPF_OR | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_ori(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_or(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_XOR | BPF_K: + case BPF_ALU64 | BPF_XOR | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(imm)) { + emit(rv_xori(rd, rd, imm), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + } + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_xor(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_MUL | BPF_K: + case BPF_ALU64 | BPF_MUL | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_mul(rd, rd, RV_REG_T1) : + rv_mulw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_DIV | BPF_K: + case BPF_ALU64 | BPF_DIV | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_divu(rd, rd, RV_REG_T1) : + rv_divuw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_MOD | BPF_K: + case BPF_ALU64 | BPF_MOD | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(is64 ? rv_remu(rd, rd, RV_REG_T1) : + rv_remuw(rd, rd, RV_REG_T1), ctx); + if (!is64) { + emit(rv_slli(rd, rd, 32), ctx); + emit(rv_srli(rd, rd, 32), ctx); + } + break; + case BPF_ALU | BPF_LSH | BPF_K: + case BPF_ALU64 | BPF_LSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_slli(rd, rd, imm) : + rv_slliw(rd, rd, imm), ctx); + break; + case BPF_ALU | BPF_RSH | BPF_K: + case BPF_ALU64 | BPF_RSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? rv_srli(rd, rd, imm) : + rv_srliw(rd, rd, imm), ctx); + break; + case BPF_ALU | BPF_ARSH | BPF_K: + case BPF_ALU64 | BPF_ARSH | BPF_K: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(is64 ? 
rv_srai(rd, rd, imm) : + rv_sraiw(rd, rd, imm), ctx); + break; + + /* JUMP off */ + case BPF_JMP | BPF_JA: + rvoff = rv_offset(i + off, i, ctx); + if (!is_21b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, rvoff); + return -1; + } + + emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* IF (dst COND src) JUMP off */ + case BPF_JMP | BPF_JEQ | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_beq(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bltu(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bltu(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bgeu(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bgeu(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JNE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bne(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_blt(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLT | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_blt(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bge(rd, rs, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLE | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = 
bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_bge(rs, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSET | BPF_X: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit(rv_and(RV_REG_T1, rd, rs), ctx); + emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* IF (dst COND imm) JUMP off */ + case BPF_JMP | BPF_JEQ | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_beq(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bltu(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bltu(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JGE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bgeu(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JLE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bgeu(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JNE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bne(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_blt(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLT | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_blt(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSGE | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bge(rd, RV_REG_T1, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSLE | BPF_K: 
+ rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + emit(rv_bge(RV_REG_T1, rd, rvoff >> 1), ctx); + break; + case BPF_JMP | BPF_JSET | BPF_K: + rvoff = rv_offset(i + off, i, ctx); + if (!is_13b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, (int)rvoff); + return -1; + } + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T2, imm, ctx); + emit(rv_and(RV_REG_T1, rd, RV_REG_T2), ctx); + emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* function call */ + case BPF_JMP | BPF_CALL: + { + bool fixed; + int i, ret; + u64 addr; + + seen_call(ctx); + ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr, + &fixed); + if (ret < 0) + return ret; + if (fixed) { + emit_imm(RV_REG_T1, addr, ctx); + } else { + i = ctx->ninsns; + emit_imm(RV_REG_T1, addr, ctx); + for (i = ctx->ninsns - i; i < 8; i++) { + /* nop */ + emit(rv_addi(RV_REG_ZERO, RV_REG_ZERO, 0), + ctx); + } + } + emit(rv_jalr(RV_REG_RA, RV_REG_T1, 0), ctx); + rd = bpf_to_rv_reg(BPF_REG_0, ctx); + emit(rv_addi(rd, RV_REG_A0, 0), ctx); + break; + } + /* tail call */ + case BPF_JMP | BPF_TAIL_CALL: + rd = bpf_to_rv_reg(TAIL_CALL_REG, ctx); + pr_err("bpf-jit: tail call not supported yet!\n"); + return -1; + + /* function return */ + case BPF_JMP | BPF_EXIT: + if (i == ctx->prog->len - 1) + break; + + rvoff = epilogue_offset(ctx); + if (!is_21b_int(rvoff)) { + pr_err("bpf-jit: %d offset=%d not supported yet!\n", + __LINE__, rvoff); + return -1; + } + + emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx); + break; + + /* dst = imm64 */ + case BPF_LD | BPF_IMM | BPF_DW: + { + struct bpf_insn insn1 = insn[1]; + u64 imm64; + + imm64 = (u64)insn1.imm << 32 | (u32)imm; + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(rd, imm64, ctx); + return 1; + } + + /* LDX: dst = *(size *)(src + off) */ + case BPF_LDX | BPF_MEM | BPF_B: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lbu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lbu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_H: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lhu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lhu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_W: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_lwu(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_lwu(rd, 0, RV_REG_T1), ctx); + break; + case BPF_LDX | BPF_MEM | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_ld(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx); + emit(rv_ld(rd, 0, RV_REG_T1), ctx); + break; + + /* ST: *(size *)(dst + off) = imm */ + case BPF_ST | BPF_MEM | BPF_B: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sb(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + 
emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + + case BPF_ST | BPF_MEM | BPF_H: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sh(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + case BPF_ST | BPF_MEM | BPF_W: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sw(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sw(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + case BPF_ST | BPF_MEM | BPF_DW: + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + emit_imm(RV_REG_T1, imm, ctx); + if (is_12b_int(off)) { + emit(rv_sd(rd, off, RV_REG_T1), ctx); + break; + } + + emit_imm(RV_REG_T2, off, ctx); + emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx); + emit(rv_sd(RV_REG_T2, 0, RV_REG_T1), ctx); + break; + + /* STX: *(size *)(dst + off) = src */ + case BPF_STX | BPF_MEM | BPF_B: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sb(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sb(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_H: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sh(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sh(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_W: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sw(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sw(RV_REG_T1, 0, rs), ctx); + break; + case BPF_STX | BPF_MEM | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (is_12b_int(off)) { + emit(rv_sd(rd, off, rs), ctx); + break; + } + + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + emit(rv_sd(RV_REG_T1, 0, rs), ctx); + break; + /* STX XADD: lock *(u32 *)(dst + off) += src */ + case BPF_STX | BPF_XADD | BPF_W: + /* STX XADD: lock *(u64 *)(dst + off) += src */ + case BPF_STX | BPF_XADD | BPF_DW: + rs = bpf_to_rv_reg(insn->src_reg, ctx); + rd = bpf_to_rv_reg(insn->dst_reg, ctx); + if (off) { + if (is_12b_int(off)) { + emit(rv_addi(RV_REG_T1, rd, off), ctx); + } else { + emit_imm(RV_REG_T1, off, ctx); + emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx); + } + + rd = RV_REG_T1; + } + + emit(BPF_SIZE(code) == BPF_W ? 
+ rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0) : + rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0), ctx); + break; + default: + pr_err("bpf-jit: unknown opcode %02x\n", code); + return -EINVAL; + } + + return 0; +} + +static void build_prologue(struct rv_jit_context *ctx) +{ + int stack_adjust = 0, store_offset, bpf_stack_adjust; + + if (seen_reg(RV_REG_RA, ctx)) + stack_adjust += 8; + stack_adjust += 8; /* RV_REG_FP */ + if (seen_reg(RV_REG_S1, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S2, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S3, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S4, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S5, ctx)) + stack_adjust += 8; + if (seen_reg(RV_REG_S6, ctx)) + stack_adjust += 8; + + stack_adjust = round_up(stack_adjust, 16); + bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16); + stack_adjust += bpf_stack_adjust; + + store_offset = stack_adjust - 8; + + emit(rv_addi(RV_REG_SP, RV_REG_SP, -stack_adjust), ctx); + + if (seen_reg(RV_REG_RA, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_RA), ctx); + store_offset -= 8; + } + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_FP), ctx); + store_offset -= 8; + if (seen_reg(RV_REG_S1, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S1), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S2, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S2), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S3, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S3), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S4, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S4), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S5, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S5), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S6, ctx)) { + emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S6), ctx); + store_offset -= 8; + } + + emit(rv_addi(RV_REG_FP, RV_REG_SP, stack_adjust), ctx); + + if (bpf_stack_adjust) { + if (!seen_reg(RV_REG_S5, ctx)) + pr_warn("bpf-jit: not seen BPF_REG_FP, stack is %d\n", + bpf_stack_adjust); + emit(rv_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust), ctx); + } + + ctx->stack_size = stack_adjust; +} + +static void build_epilogue(struct rv_jit_context *ctx) +{ + int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8; + + if (seen_reg(RV_REG_RA, ctx)) { + emit(rv_ld(RV_REG_RA, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + emit(rv_ld(RV_REG_FP, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + if (seen_reg(RV_REG_S1, ctx)) { + emit(rv_ld(RV_REG_S1, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S2, ctx)) { + emit(rv_ld(RV_REG_S2, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S3, ctx)) { + emit(rv_ld(RV_REG_S3, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S4, ctx)) { + emit(rv_ld(RV_REG_S4, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S5, ctx)) { + emit(rv_ld(RV_REG_S5, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + if (seen_reg(RV_REG_S6, ctx)) { + emit(rv_ld(RV_REG_S6, store_offset, RV_REG_SP), ctx); + store_offset -= 8; + } + + emit(rv_addi(RV_REG_SP, RV_REG_SP, stack_adjust), ctx); + /* Set return value. 
*/ + emit(rv_addi(RV_REG_A0, RV_REG_A5, 0), ctx); + emit(rv_jalr(RV_REG_ZERO, RV_REG_RA, 0), ctx); +} + +static int build_body(struct rv_jit_context *ctx, bool extra_pass) +{ + const struct bpf_prog *prog = ctx->prog; + int i; + + for (i = 0; i < prog->len; i++) { + const struct bpf_insn *insn = &prog->insnsi[i]; + int ret; + + ret = emit_insn(insn, ctx, extra_pass); + if (ret > 0) { + i++; + if (ctx->insns == NULL) + ctx->offset[i] = ctx->ninsns; + continue; + } + if (ctx->insns == NULL) + ctx->offset[i] = ctx->ninsns; + if (ret) + return ret; + } + return 0; +} + +static void bpf_fill_ill_insns(void *area, unsigned int size) +{ + memset(area, 0, size); +} + +static void bpf_flush_icache(void *start, void *end) +{ + flush_icache_range((unsigned long)start, (unsigned long)end); +} + struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog) { + bool tmp_blinded = false, extra_pass = false; + struct bpf_prog *tmp, *orig_prog = prog; + struct rv_jit_data *jit_data; + struct rv_jit_context *ctx; + unsigned int image_size; + + if (!prog->jit_requested) + return orig_prog; + + tmp = bpf_jit_blind_constants(prog); + if (IS_ERR(tmp)) + return orig_prog; + if (tmp != prog) { + tmp_blinded = true; + prog = tmp; + } + + jit_data = prog->aux->jit_data; + if (!jit_data) { + jit_data = kzalloc(sizeof(*jit_data), GFP_KERNEL); + if (!jit_data) { + prog = orig_prog; + goto out; + } + prog->aux->jit_data = jit_data; + } + + ctx = &jit_data->ctx; + + if (ctx->offset) { + extra_pass = true; + image_size = sizeof(u32) * ctx->ninsns; + goto skip_init_ctx; + } + + ctx->prog = prog; + ctx->offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL); + if (!ctx->offset) { + prog = orig_prog; + goto out_offset; + } + + /* First pass generates the ctx->offset, but does not emit an image. */ + if (build_body(ctx, extra_pass)) { + prog = orig_prog; + goto out_offset; + } + build_prologue(ctx); + ctx->epilogue_offset = ctx->ninsns; + build_epilogue(ctx); + + /* Allocate image, now that we know the size. */ + image_size = sizeof(u32) * ctx->ninsns; + jit_data->header = bpf_jit_binary_alloc(image_size, &jit_data->image, + sizeof(u32), + bpf_fill_ill_insns); + if (!jit_data->header) { + prog = orig_prog; + goto out_offset; + } + + /* Second, real pass, that actually emits the image. */ + ctx->insns = (u32 *)jit_data->image; +skip_init_ctx: + ctx->ninsns = 0; + + build_prologue(ctx); + if (build_body(ctx, extra_pass)) { + bpf_jit_binary_free(jit_data->header); + prog = orig_prog; + goto out_offset; + } + build_epilogue(ctx); + + if (bpf_jit_enable > 1) + bpf_jit_dump(prog->len, image_size, 2, ctx->insns); + + prog->bpf_func = (void *)ctx->insns; + prog->jited = 1; + prog->jited_len = image_size; + + bpf_flush_icache(jit_data->header, (u8 *)ctx->insns + ctx->ninsns); + + if (!prog->is_func || extra_pass) { +out_offset: + kfree(ctx->offset); + kfree(jit_data); + prog->aux->jit_data = NULL; + } +out: + if (tmp_blinded) + bpf_jit_prog_release_other(prog, prog == orig_prog ?
+ tmp : orig_prog); return prog; } -- 2.19.1
IGN0eCk7CisJCWlmICghaXM2NCkgeworCQkJZW1pdChydl9zbGxpKHJkLCByZCwgMzIpLCBjdHgp OworCQkJZW1pdChydl9zcmxpKHJkLCByZCwgMzIpLCBjdHgpOworCQl9CisJCWJyZWFrOworCWNh c2UgQlBGX0FMVSB8IEJQRl9MU0ggfCBCUEZfSzoKKwljYXNlIEJQRl9BTFU2NCB8IEJQRl9MU0gg fCBCUEZfSzoKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVt aXQoaXM2NCA/IHJ2X3NsbGkocmQsIHJkLCBpbW0pIDoKKwkJICAgICBydl9zbGxpdyhyZCwgcmQs IGltbSksICBjdHgpOworCQlicmVhazsKKwljYXNlIEJQRl9BTFUgfCBCUEZfUlNIIHwgQlBGX0s6 CisJY2FzZSBCUEZfQUxVNjQgfCBCUEZfUlNIIHwgQlBGX0s6CisJCXJkID0gYnBmX3RvX3J2X3Jl ZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0KGlzNjQgPyBydl9zcmxpKHJkLCByZCwgaW1t KSA6CisJCSAgICAgcnZfc3JsaXcocmQsIHJkLCBpbW0pLCAgY3R4KTsKKwkJYnJlYWs7CisJY2Fz ZSBCUEZfQUxVIHwgQlBGX0FSU0ggfCBCUEZfSzoKKwljYXNlIEJQRl9BTFU2NCB8IEJQRl9BUlNI IHwgQlBGX0s6CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQll bWl0KGlzNjQgPyBydl9zcmFpKHJkLCByZCwgaW1tKSA6CisJCSAgICAgcnZfc3JhaXcocmQsIHJk LCBpbW0pLCAgY3R4KTsKKwkJYnJlYWs7CisKKwkvKiBKVU1QIG9mZiAqLworCWNhc2UgQlBGX0pN UCB8IEJQRl9KQToKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwgaSwgY3R4KTsKKwkJaWYg KCFpc18yMWJfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0OiAlZCBvZmZzZXQ9JWQg bm90IHN1cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9fLCBydm9mZik7CisJCQly ZXR1cm4gLTE7CisJCX0KKworCQllbWl0KHJ2X2phbChSVl9SRUdfWkVSTywgcnZvZmYgPj4gMSks IGN0eCk7CisJCWJyZWFrOworCisJLyogSUYgKGRzdCBDT05EIHNyYykgSlVNUCBvZmYgKi8KKwlj YXNlIEJQRl9KTVAgfCBCUEZfSkVRIHwgQlBGX1g6CisJCXJ2b2ZmID0gcnZfb2Zmc2V0KGkgKyBv ZmYsIGksIGN0eCk7CisJCWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXByX2VycigiYnBm LWppdDogJWQgb2Zmc2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAgICAgICBfX0xJ TkVfXywgKGludClydm9mZik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcnMgPSBicGZfdG9fcnZf cmVnKGluc24tPnNyY19yZWcsIGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3Rf cmVnLCBjdHgpOworCQllbWl0KHJ2X2JlcShyZCwgcnMsIHJ2b2ZmID4+IDEpLCBjdHgpOworCQli cmVhazsKKwljYXNlIEJQRl9KTVAgfCBCUEZfSkdUIHwgQlBGX1g6CisJCXJ2b2ZmID0gcnZfb2Zm c2V0KGkgKyBvZmYsIGksIGN0eCk7CisJCWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXBy X2VycigiYnBmLWppdDogJWQgb2Zmc2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAg ICAgICBfX0xJTkVfXywgKGludClydm9mZik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcnMgPSBi cGZfdG9fcnZfcmVnKGluc24tPnNyY19yZWcsIGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3JlZyhp bnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0KHJ2X2JsdHUocnMsIHJkLCBydm9mZiA+PiAxKSwg Y3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfSk1QIHwgQlBGX0pMVCB8IEJQRl9YOgorCQlydm9m ZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBjdHgpOworCQlpZiAoIWlzXzEzYl9pbnQocnZvZmYp KSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9mZnNldD0lZCBub3Qgc3VwcG9ydGVkIHlldCFc biIsCisJCQkgICAgICAgX19MSU5FX18sIChpbnQpcnZvZmYpOworCQkJcmV0dXJuIC0xOworCQl9 CisJCXJzID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5zcmNfcmVnLCBjdHgpOworCQlyZCA9IGJwZl90 b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJZW1pdChydl9ibHR1KHJkLCBycywgcnZv ZmYgPj4gMSksIGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0pNUCB8IEJQRl9KR0UgfCBCUEZf WDoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwgaSwgY3R4KTsKKwkJaWYgKCFpc18xM2Jf aW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0OiAlZCBvZmZzZXQ9JWQgbm90IHN1cHBv cnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9fLCAoaW50KXJ2b2ZmKTsKKwkJCXJldHVy biAtMTsKKwkJfQorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJ cmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVtaXQocnZfYmdldShy ZCwgcnMsIHJ2b2ZmID4+IDEpLCBjdHgpOworCQlicmVhazsKKwljYXNlIEJQRl9KTVAgfCBCUEZf SkxFIHwgQlBGX1g6CisJCXJ2b2ZmID0gcnZfb2Zmc2V0KGkgKyBvZmYsIGksIGN0eCk7CisJCWlm ICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXByX2VycigiYnBmLWppdDogJWQgb2Zmc2V0PSVk IG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAgICAgICBfX0xJTkVfXywgKGludClydm9mZik7 
CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcnMgPSBicGZfdG9fcnZfcmVnKGluc24tPnNyY19yZWcs IGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0 KHJ2X2JnZXUocnMsIHJkLCBydm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZf Sk1QIHwgQlBGX0pORSB8IEJQRl9YOgorCQlydm9mZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBj dHgpOworCQlpZiAoIWlzXzEzYl9pbnQocnZvZmYpKSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVk IG9mZnNldD0lZCBub3Qgc3VwcG9ydGVkIHlldCFcbiIsCisJCQkgICAgICAgX19MSU5FX18sIChp bnQpcnZvZmYpOworCQkJcmV0dXJuIC0xOworCQl9CisJCXJzID0gYnBmX3RvX3J2X3JlZyhpbnNu LT5zcmNfcmVnLCBjdHgpOworCQlyZCA9IGJwZl90b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4 KTsKKwkJZW1pdChydl9ibmUocmQsIHJzLCBydm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJ Y2FzZSBCUEZfSk1QIHwgQlBGX0pTR1QgfCBCUEZfWDoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSAr IG9mZiwgaSwgY3R4KTsKKwkJaWYgKCFpc18xM2JfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJi cGYtaml0OiAlZCBvZmZzZXQ9JWQgbm90IHN1cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9f TElORV9fLCAoaW50KXJ2b2ZmKTsKKwkJCXJldHVybiAtMTsKKwkJfQorCQlycyA9IGJwZl90b19y dl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRz dF9yZWcsIGN0eCk7CisJCWVtaXQocnZfYmx0KHJzLCByZCwgcnZvZmYgPj4gMSksIGN0eCk7CisJ CWJyZWFrOworCWNhc2UgQlBGX0pNUCB8IEJQRl9KU0xUIHwgQlBGX1g6CisJCXJ2b2ZmID0gcnZf b2Zmc2V0KGkgKyBvZmYsIGksIGN0eCk7CisJCWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJ CXByX2VycigiYnBmLWppdDogJWQgb2Zmc2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJ CSAgICAgICBfX0xJTkVfXywgKGludClydm9mZik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcnMg PSBicGZfdG9fcnZfcmVnKGluc24tPnNyY19yZWcsIGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3Jl ZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0KHJ2X2JsdChyZCwgcnMsIHJ2b2ZmID4+IDEp LCBjdHgpOworCQlicmVhazsKKwljYXNlIEJQRl9KTVAgfCBCUEZfSlNHRSB8IEJQRl9YOgorCQly dm9mZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBjdHgpOworCQlpZiAoIWlzXzEzYl9pbnQocnZv ZmYpKSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9mZnNldD0lZCBub3Qgc3VwcG9ydGVkIHll dCFcbiIsCisJCQkgICAgICAgX19MSU5FX18sIChpbnQpcnZvZmYpOworCQkJcmV0dXJuIC0xOwor CQl9CisJCXJzID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5zcmNfcmVnLCBjdHgpOworCQlyZCA9IGJw Zl90b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJZW1pdChydl9iZ2UocmQsIHJzLCBy dm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfSk1QIHwgQlBGX0pTTEUgfCBC UEZfWDoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwgaSwgY3R4KTsKKwkJaWYgKCFpc18x M2JfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0OiAlZCBvZmZzZXQ9JWQgbm90IHN1 cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9fLCAoaW50KXJ2b2ZmKTsKKwkJCXJl dHVybiAtMTsKKwkJfQorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsK KwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVtaXQocnZfYmdl KHJzLCByZCwgcnZvZmYgPj4gMSksIGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0pNUCB8IEJQ Rl9KU0VUIHwgQlBGX1g6CisJCXJ2b2ZmID0gcnZfb2Zmc2V0KGkgKyBvZmYsIGksIGN0eCk7CisJ CWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXByX2VycigiYnBmLWppdDogJWQgb2Zmc2V0 PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAgICAgICBfX0xJTkVfXywgKGludClydm9m Zik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcnMgPSBicGZfdG9fcnZfcmVnKGluc24tPnNyY19y ZWcsIGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQll bWl0KHJ2X2FuZChSVl9SRUdfVDEsIHJkLCBycyksIGN0eCk7CisJCWVtaXQocnZfYm5lKFJWX1JF R19UMSwgUlZfUkVHX1pFUk8sIHJ2b2ZmID4+IDEpLCBjdHgpOworCQlicmVhazsKKworCS8qIElG IChkc3QgQ09ORCBpbW0pIEpVTVAgb2ZmICovCisJY2FzZSBCUEZfSk1QIHwgQlBGX0pFUSB8IEJQ Rl9LOgorCQlydm9mZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBjdHgpOworCQlpZiAoIWlzXzEz Yl9pbnQocnZvZmYpKSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9mZnNldD0lZCBub3Qgc3Vw cG9ydGVkIHlldCFcbiIsCisJCQkgICAgICAgX19MSU5FX18sIChpbnQpcnZvZmYpOworCQkJcmV0 dXJuIC0xOworCQl9CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOwor 
CQllbWl0X2ltbShSVl9SRUdfVDEsIGltbSwgY3R4KTsKKwkJZW1pdChydl9iZXEocmQsIFJWX1JF R19UMSwgcnZvZmYgPj4gMSksIGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0pNUCB8IEJQRl9K R1QgfCBCUEZfSzoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwgaSwgY3R4KTsKKwkJaWYg KCFpc18xM2JfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0OiAlZCBvZmZzZXQ9JWQg bm90IHN1cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9fLCAoaW50KXJ2b2ZmKTsK KwkJCXJldHVybiAtMTsKKwkJfQorCQlyZCA9IGJwZl90b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywg Y3R4KTsKKwkJZW1pdF9pbW0oUlZfUkVHX1QxLCBpbW0sIGN0eCk7CisJCWVtaXQocnZfYmx0dShS Vl9SRUdfVDEsIHJkLCBydm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfSk1Q IHwgQlBGX0pMVCB8IEJQRl9LOgorCQlydm9mZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBjdHgp OworCQlpZiAoIWlzXzEzYl9pbnQocnZvZmYpKSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9m ZnNldD0lZCBub3Qgc3VwcG9ydGVkIHlldCFcbiIsCisJCQkgICAgICAgX19MSU5FX18sIChpbnQp cnZvZmYpOworCQkJcmV0dXJuIC0xOworCQl9CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5k c3RfcmVnLCBjdHgpOworCQllbWl0X2ltbShSVl9SRUdfVDEsIGltbSwgY3R4KTsKKwkJZW1pdChy dl9ibHR1KHJkLCBSVl9SRUdfVDEsIHJ2b2ZmID4+IDEpLCBjdHgpOworCQlicmVhazsKKwljYXNl IEJQRl9KTVAgfCBCUEZfSkdFIHwgQlBGX0s6CisJCXJ2b2ZmID0gcnZfb2Zmc2V0KGkgKyBvZmYs IGksIGN0eCk7CisJCWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXByX2VycigiYnBmLWpp dDogJWQgb2Zmc2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAgICAgICBfX0xJTkVf XywgKGludClydm9mZik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcmQgPSBicGZfdG9fcnZfcmVn KGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVtaXRfaW1tKFJWX1JFR19UMSwgaW1tLCBjdHgpOwor CQllbWl0KHJ2X2JnZXUocmQsIFJWX1JFR19UMSwgcnZvZmYgPj4gMSksIGN0eCk7CisJCWJyZWFr OworCWNhc2UgQlBGX0pNUCB8IEJQRl9KTEUgfCBCUEZfSzoKKwkJcnZvZmYgPSBydl9vZmZzZXQo aSArIG9mZiwgaSwgY3R4KTsKKwkJaWYgKCFpc18xM2JfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJy KCJicGYtaml0OiAlZCBvZmZzZXQ9JWQgbm90IHN1cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAg IF9fTElORV9fLCAoaW50KXJ2b2ZmKTsKKwkJCXJldHVybiAtMTsKKwkJfQorCQlyZCA9IGJwZl90 b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJZW1pdF9pbW0oUlZfUkVHX1QxLCBpbW0s IGN0eCk7CisJCWVtaXQocnZfYmdldShSVl9SRUdfVDEsIHJkLCBydm9mZiA+PiAxKSwgY3R4KTsK KwkJYnJlYWs7CisJY2FzZSBCUEZfSk1QIHwgQlBGX0pORSB8IEJQRl9LOgorCQlydm9mZiA9IHJ2 X29mZnNldChpICsgb2ZmLCBpLCBjdHgpOworCQlpZiAoIWlzXzEzYl9pbnQocnZvZmYpKSB7CisJ CQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9mZnNldD0lZCBub3Qgc3VwcG9ydGVkIHlldCFcbiIsCisJ CQkgICAgICAgX19MSU5FX18sIChpbnQpcnZvZmYpOworCQkJcmV0dXJuIC0xOworCQl9CisJCXJk ID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0X2ltbShSVl9SRUdf VDEsIGltbSwgY3R4KTsKKwkJZW1pdChydl9ibmUocmQsIFJWX1JFR19UMSwgcnZvZmYgPj4gMSks IGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0pNUCB8IEJQRl9KU0dUIHwgQlBGX0s6CisJCXJ2 b2ZmID0gcnZfb2Zmc2V0KGkgKyBvZmYsIGksIGN0eCk7CisJCWlmICghaXNfMTNiX2ludChydm9m ZikpIHsKKwkJCXByX2VycigiYnBmLWppdDogJWQgb2Zmc2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0 IVxuIiwKKwkJCSAgICAgICBfX0xJTkVfXywgKGludClydm9mZik7CisJCQlyZXR1cm4gLTE7CisJ CX0KKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVtaXRfaW1t KFJWX1JFR19UMSwgaW1tLCBjdHgpOworCQllbWl0KHJ2X2JsdChSVl9SRUdfVDEsIHJkLCBydm9m ZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfSk1QIHwgQlBGX0pTTFQgfCBCUEZf SzoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwgaSwgY3R4KTsKKwkJaWYgKCFpc18xM2Jf aW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0OiAlZCBvZmZzZXQ9JWQgbm90IHN1cHBv cnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9fLCAoaW50KXJ2b2ZmKTsKKwkJCXJldHVy biAtMTsKKwkJfQorCQlyZCA9IGJwZl90b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJ ZW1pdF9pbW0oUlZfUkVHX1QxLCBpbW0sIGN0eCk7CisJCWVtaXQocnZfYmx0KHJkLCBSVl9SRUdf VDEsIHJ2b2ZmID4+IDEpLCBjdHgpOworCQlicmVhazsKKwljYXNlIEJQRl9KTVAgfCBCUEZfSlNH RSB8IEJQRl9LOgorCQlydm9mZiA9IHJ2X29mZnNldChpICsgb2ZmLCBpLCBjdHgpOworCQlpZiAo 
IWlzXzEzYl9pbnQocnZvZmYpKSB7CisJCQlwcl9lcnIoImJwZi1qaXQ6ICVkIG9mZnNldD0lZCBu b3Qgc3VwcG9ydGVkIHlldCFcbiIsCisJCQkgICAgICAgX19MSU5FX18sIChpbnQpcnZvZmYpOwor CQkJcmV0dXJuIC0xOworCQl9CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBj dHgpOworCQllbWl0X2ltbShSVl9SRUdfVDEsIGltbSwgY3R4KTsKKwkJZW1pdChydl9iZ2UocmQs IFJWX1JFR19UMSwgcnZvZmYgPj4gMSksIGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0pNUCB8 IEJQRl9KU0xFIHwgQlBGX0s6CisJCXJ2b2ZmID0gcnZfb2Zmc2V0KGkgKyBvZmYsIGksIGN0eCk7 CisJCWlmICghaXNfMTNiX2ludChydm9mZikpIHsKKwkJCXByX2VycigiYnBmLWppdDogJWQgb2Zm c2V0PSVkIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIiwKKwkJCSAgICAgICBfX0xJTkVfXywgKGludCly dm9mZik7CisJCQlyZXR1cm4gLTE7CisJCX0KKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRz dF9yZWcsIGN0eCk7CisJCWVtaXRfaW1tKFJWX1JFR19UMSwgaW1tLCBjdHgpOworCQllbWl0KHJ2 X2JnZShSVl9SRUdfVDEsIHJkLCBydm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBC UEZfSk1QIHwgQlBGX0pTRVQgfCBCUEZfSzoKKwkJcnZvZmYgPSBydl9vZmZzZXQoaSArIG9mZiwg aSwgY3R4KTsKKwkJaWYgKCFpc18xM2JfaW50KHJ2b2ZmKSkgeworCQkJcHJfZXJyKCJicGYtaml0 OiAlZCBvZmZzZXQ9JWQgbm90IHN1cHBvcnRlZCB5ZXQhXG4iLAorCQkJICAgICAgIF9fTElORV9f LCAoaW50KXJ2b2ZmKTsKKwkJCXJldHVybiAtMTsKKwkJfQorCQlyZCA9IGJwZl90b19ydl9yZWco aW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJZW1pdF9pbW0oUlZfUkVHX1QyLCBpbW0sIGN0eCk7CisJ CWVtaXQocnZfYW5kKFJWX1JFR19UMSwgcmQsIFJWX1JFR19UMiksIGN0eCk7CisJCWVtaXQocnZf Ym5lKFJWX1JFR19UMSwgUlZfUkVHX1pFUk8sIHJ2b2ZmID4+IDEpLCBjdHgpOworCQlicmVhazsK KworCS8qIGZ1bmN0aW9uIGNhbGwgKi8KKwljYXNlIEJQRl9KTVAgfCBCUEZfQ0FMTDoKKwl7CisJ CWJvb2wgZml4ZWQ7CisJCWludCBpLCByZXQ7CisJCXU2NCBhZGRyOworCisJCXNlZW5fY2FsbChj dHgpOworCQlyZXQgPSBicGZfaml0X2dldF9mdW5jX2FkZHIoY3R4LT5wcm9nLCBpbnNuLCBleHRy YV9wYXNzLCAmYWRkciwKKwkJCQkJICAgICZmaXhlZCk7CisJCWlmIChyZXQgPCAwKQorCQkJcmV0 dXJuIHJldDsKKwkJaWYgKGZpeGVkKSB7CisJCQllbWl0X2ltbShSVl9SRUdfVDEsIGFkZHIsIGN0 eCk7CisJCX0gZWxzZSB7CisJCQlpID0gY3R4LT5uaW5zbnM7CisJCQllbWl0X2ltbShSVl9SRUdf VDEsIGFkZHIsIGN0eCk7CisJCQlmb3IgKGkgPSBjdHgtPm5pbnNucyAtIGk7IGkgPCA4OyBpKysp IHsKKwkJCQkvKiBub3AgKi8KKwkJCQllbWl0KHJ2X2FkZGkoUlZfUkVHX1pFUk8sIFJWX1JFR19a RVJPLCAwKSwKKwkJCQkgICAgIGN0eCk7CisJCQl9CisJCX0KKwkJZW1pdChydl9qYWxyKFJWX1JF R19SQSwgUlZfUkVHX1QxLCAwKSwgY3R4KTsKKwkJcmQgPSBicGZfdG9fcnZfcmVnKEJQRl9SRUdf MCwgY3R4KTsKKwkJZW1pdChydl9hZGRpKHJkLCBSVl9SRUdfQTAsIDApLCBjdHgpOworCQlicmVh azsKKwl9CisJLyogdGFpbCBjYWxsICovCisJY2FzZSBCUEZfSk1QIHwgQlBGX1RBSUxfQ0FMTDoK KwkJcmQgPSBicGZfdG9fcnZfcmVnKFRBSUxfQ0FMTF9SRUcsIGN0eCk7CisJCXByX2VycigiYnBm LWppdDogdGFpbCBjYWxsIG5vdCBzdXBwb3J0ZWQgeWV0IVxuIik7CisJCXJldHVybiAtMTsKKwor CS8qIGZ1bmN0aW9uIHJldHVybiAqLworCWNhc2UgQlBGX0pNUCB8IEJQRl9FWElUOgorCQlpZiAo aSA9PSBjdHgtPnByb2ctPmxlbiAtIDEpCisJCQlicmVhazsKKworCQlydm9mZiA9IGVwaWxvZ3Vl X29mZnNldChjdHgpOworCQlpZiAoIWlzXzIxYl9pbnQocnZvZmYpKSB7CisJCQlwcl9lcnIoImJw Zi1qaXQ6ICVkIG9mZnNldD0lZCBub3Qgc3VwcG9ydGVkIHlldCFcbiIsCisJCQkgICAgICAgX19M SU5FX18sIHJ2b2ZmKTsKKwkJCXJldHVybiAtMTsKKwkJfQorCisJCWVtaXQocnZfamFsKFJWX1JF R19aRVJPLCBydm9mZiA+PiAxKSwgY3R4KTsKKwkJYnJlYWs7CisKKwkvKiBkc3QgPSBpbW02NCAq LworCWNhc2UgQlBGX0xEIHwgQlBGX0lNTSB8IEJQRl9EVzoKKwl7CisJCXN0cnVjdCBicGZfaW5z biBpbnNuMSA9IGluc25bMV07CisJCXU2NCBpbW02NDsKKworCQlpbW02NCA9ICh1NjQpaW5zbjEu aW1tIDw8IDMyIHwgKHUzMilpbW07CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVn LCBjdHgpOworCQllbWl0X2ltbShyZCwgaW1tNjQsIGN0eCk7CisJCXJldHVybiAxOworCX0KKwor CS8qIExEWDogZHN0ID0gKihzaXplICopKHNyYyArIG9mZikgKi8KKwljYXNlIEJQRl9MRFggfCBC UEZfTUVNIHwgQlBGX0I6CisJCXJzID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5zcmNfcmVnLCBjdHgp OworCQlyZCA9IGJwZl90b19ydl9yZWcoaW5zbi0+ZHN0X3JlZywgY3R4KTsKKwkJaWYgKGlzXzEy Yl9pbnQob2ZmKSkgeworCQkJZW1pdChydl9sYnUocmQsIG9mZiwgcnMpLCBjdHgpOworCQkJYnJl 
YWs7CisJCX0KKworCQllbWl0X2ltbShSVl9SRUdfVDEsIG9mZiwgY3R4KTsKKwkJZW1pdChydl9h ZGQoUlZfUkVHX1QxLCBSVl9SRUdfVDEsIHJzKSwgY3R4KTsKKwkJZW1pdChydl9sYnUocmQsIDAs IFJWX1JFR19UMSksIGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX0xEWCB8IEJQRl9NRU0gfCBC UEZfSDoKKwkJcnMgPSBicGZfdG9fcnZfcmVnKGluc24tPnNyY19yZWcsIGN0eCk7CisJCXJkID0g YnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQlpZiAoaXNfMTJiX2ludChvZmYp KSB7CisJCQllbWl0KHJ2X2xodShyZCwgb2ZmLCBycyksIGN0eCk7CisJCQlicmVhazsKKwkJfQor CisJCWVtaXRfaW1tKFJWX1JFR19UMSwgb2ZmLCBjdHgpOworCQllbWl0KHJ2X2FkZChSVl9SRUdf VDEsIFJWX1JFR19UMSwgcnMpLCBjdHgpOworCQllbWl0KHJ2X2xodShyZCwgMCwgUlZfUkVHX1Qx KSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfTERYIHwgQlBGX01FTSB8IEJQRl9XOgorCQly cyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBicGZfdG9fcnZf cmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWlmIChpc18xMmJfaW50KG9mZikpIHsKKwkJCWVt aXQocnZfbHd1KHJkLCBvZmYsIHJzKSwgY3R4KTsKKwkJCWJyZWFrOworCQl9CisKKwkJZW1pdF9p bW0oUlZfUkVHX1QxLCBvZmYsIGN0eCk7CisJCWVtaXQocnZfYWRkKFJWX1JFR19UMSwgUlZfUkVH X1QxLCBycyksIGN0eCk7CisJCWVtaXQocnZfbHd1KHJkLCAwLCBSVl9SRUdfVDEpLCBjdHgpOwor CQlicmVhazsKKwljYXNlIEJQRl9MRFggfCBCUEZfTUVNIHwgQlBGX0RXOgorCQlycyA9IGJwZl90 b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24t PmRzdF9yZWcsIGN0eCk7CisJCWlmIChpc18xMmJfaW50KG9mZikpIHsKKwkJCWVtaXQocnZfbGQo cmQsIG9mZiwgcnMpLCBjdHgpOworCQkJYnJlYWs7CisJCX0KKworCQllbWl0X2ltbShSVl9SRUdf VDEsIG9mZiwgY3R4KTsKKwkJZW1pdChydl9hZGQoUlZfUkVHX1QxLCBSVl9SRUdfVDEsIHJzKSwg Y3R4KTsKKwkJZW1pdChydl9sZChyZCwgMCwgUlZfUkVHX1QxKSwgY3R4KTsKKwkJYnJlYWs7CisK KwkvKiBTVDogKihzaXplICopKGRzdCArIG9mZikgPSBpbW0gKi8KKwljYXNlIEJQRl9TVCB8IEJQ Rl9NRU0gfCBCUEZfQjoKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7 CisJCWVtaXRfaW1tKFJWX1JFR19UMSwgaW1tLCBjdHgpOworCQlpZiAoaXNfMTJiX2ludChvZmYp KSB7CisJCQllbWl0KHJ2X3NiKHJkLCBvZmYsIFJWX1JFR19UMSksIGN0eCk7CisJCQlicmVhazsK KwkJfQorCisJCWVtaXRfaW1tKFJWX1JFR19UMiwgb2ZmLCBjdHgpOworCQllbWl0KHJ2X2FkZChS Vl9SRUdfVDIsIFJWX1JFR19UMiwgcmQpLCBjdHgpOworCQllbWl0KHJ2X3NiKFJWX1JFR19UMiwg MCwgUlZfUkVHX1QxKSwgY3R4KTsKKwkJYnJlYWs7CisKKwljYXNlIEJQRl9TVCB8IEJQRl9NRU0g fCBCUEZfSDoKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVt aXRfaW1tKFJWX1JFR19UMSwgaW1tLCBjdHgpOworCQlpZiAoaXNfMTJiX2ludChvZmYpKSB7CisJ CQllbWl0KHJ2X3NoKHJkLCBvZmYsIFJWX1JFR19UMSksIGN0eCk7CisJCQlicmVhazsKKwkJfQor CisJCWVtaXRfaW1tKFJWX1JFR19UMiwgb2ZmLCBjdHgpOworCQllbWl0KHJ2X2FkZChSVl9SRUdf VDIsIFJWX1JFR19UMiwgcmQpLCBjdHgpOworCQllbWl0KHJ2X3NoKFJWX1JFR19UMiwgMCwgUlZf UkVHX1QxKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfU1QgfCBCUEZfTUVNIHwgQlBGX1c6 CisJCXJkID0gYnBmX3RvX3J2X3JlZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQllbWl0X2ltbShS Vl9SRUdfVDEsIGltbSwgY3R4KTsKKwkJaWYgKGlzXzEyYl9pbnQob2ZmKSkgeworCQkJZW1pdChy dl9zdyhyZCwgb2ZmLCBSVl9SRUdfVDEpLCBjdHgpOworCQkJYnJlYWs7CisJCX0KKworCQllbWl0 X2ltbShSVl9SRUdfVDIsIG9mZiwgY3R4KTsKKwkJZW1pdChydl9hZGQoUlZfUkVHX1QyLCBSVl9S RUdfVDIsIHJkKSwgY3R4KTsKKwkJZW1pdChydl9zdyhSVl9SRUdfVDIsIDAsIFJWX1JFR19UMSks IGN0eCk7CisJCWJyZWFrOworCWNhc2UgQlBGX1NUIHwgQlBGX01FTSB8IEJQRl9EVzoKKwkJcmQg PSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWVtaXRfaW1tKFJWX1JFR19U MSwgaW1tLCBjdHgpOworCQlpZiAoaXNfMTJiX2ludChvZmYpKSB7CisJCQllbWl0KHJ2X3NkKHJk LCBvZmYsIFJWX1JFR19UMSksIGN0eCk7CisJCQlicmVhazsKKwkJfQorCisJCWVtaXRfaW1tKFJW X1JFR19UMiwgb2ZmLCBjdHgpOworCQllbWl0KHJ2X2FkZChSVl9SRUdfVDIsIFJWX1JFR19UMiwg cmQpLCBjdHgpOworCQllbWl0KHJ2X3NkKFJWX1JFR19UMiwgMCwgUlZfUkVHX1QxKSwgY3R4KTsK KwkJYnJlYWs7CisKKwkvKiBTVFg6ICooc2l6ZSAqKShkc3QgKyBvZmYpID0gc3JjICovCisJY2Fz ZSBCUEZfU1RYIHwgQlBGX01FTSB8IEJQRl9COgorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+ 
c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7 CisJCWlmIChpc18xMmJfaW50KG9mZikpIHsKKwkJCWVtaXQocnZfc2IocmQsIG9mZiwgcnMpLCBj dHgpOworCQkJYnJlYWs7CisJCX0KKworCQllbWl0X2ltbShSVl9SRUdfVDEsIG9mZiwgY3R4KTsK KwkJZW1pdChydl9hZGQoUlZfUkVHX1QxLCBSVl9SRUdfVDEsIHJkKSwgY3R4KTsKKwkJZW1pdChy dl9zYihSVl9SRUdfVDEsIDAsIHJzKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfU1RYIHwg QlBGX01FTSB8IEJQRl9IOgorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4 KTsKKwkJcmQgPSBicGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWlmIChpc18x MmJfaW50KG9mZikpIHsKKwkJCWVtaXQocnZfc2gocmQsIG9mZiwgcnMpLCBjdHgpOworCQkJYnJl YWs7CisJCX0KKworCQllbWl0X2ltbShSVl9SRUdfVDEsIG9mZiwgY3R4KTsKKwkJZW1pdChydl9h ZGQoUlZfUkVHX1QxLCBSVl9SRUdfVDEsIHJkKSwgY3R4KTsKKwkJZW1pdChydl9zaChSVl9SRUdf VDEsIDAsIHJzKSwgY3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfU1RYIHwgQlBGX01FTSB8IEJQ Rl9XOgorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBi cGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWlmIChpc18xMmJfaW50KG9mZikp IHsKKwkJCWVtaXQocnZfc3cocmQsIG9mZiwgcnMpLCBjdHgpOworCQkJYnJlYWs7CisJCX0KKwor CQllbWl0X2ltbShSVl9SRUdfVDEsIG9mZiwgY3R4KTsKKwkJZW1pdChydl9hZGQoUlZfUkVHX1Qx LCBSVl9SRUdfVDEsIHJkKSwgY3R4KTsKKwkJZW1pdChydl9zdyhSVl9SRUdfVDEsIDAsIHJzKSwg Y3R4KTsKKwkJYnJlYWs7CisJY2FzZSBCUEZfU1RYIHwgQlBGX01FTSB8IEJQRl9EVzoKKwkJcnMg PSBicGZfdG9fcnZfcmVnKGluc24tPnNyY19yZWcsIGN0eCk7CisJCXJkID0gYnBmX3RvX3J2X3Jl ZyhpbnNuLT5kc3RfcmVnLCBjdHgpOworCQlpZiAoaXNfMTJiX2ludChvZmYpKSB7CisJCQllbWl0 KHJ2X3NkKHJkLCBvZmYsIHJzKSwgY3R4KTsKKwkJCWJyZWFrOworCQl9CisKKwkJZW1pdF9pbW0o UlZfUkVHX1QxLCBvZmYsIGN0eCk7CisJCWVtaXQocnZfYWRkKFJWX1JFR19UMSwgUlZfUkVHX1Qx LCByZCksIGN0eCk7CisJCWVtaXQocnZfc2QoUlZfUkVHX1QxLCAwLCBycyksIGN0eCk7CisJCWJy ZWFrOworCS8qIFNUWCBYQUREOiBsb2NrICoodTMyICopKGRzdCArIG9mZikgKz0gc3JjICovCisJ Y2FzZSBCUEZfU1RYIHwgQlBGX1hBREQgfCBCUEZfVzoKKwkvKiBTVFggWEFERDogbG9jayAqKHU2 NCAqKShkc3QgKyBvZmYpICs9IHNyYyAqLworCWNhc2UgQlBGX1NUWCB8IEJQRl9YQUREIHwgQlBG X0RXOgorCQlycyA9IGJwZl90b19ydl9yZWcoaW5zbi0+c3JjX3JlZywgY3R4KTsKKwkJcmQgPSBi cGZfdG9fcnZfcmVnKGluc24tPmRzdF9yZWcsIGN0eCk7CisJCWlmIChvZmYpIHsKKwkJCWlmIChp c18xMmJfaW50KG9mZikpIHsKKwkJCQllbWl0KHJ2X2FkZGkoUlZfUkVHX1QxLCByZCwgb2ZmKSwg Y3R4KTsKKwkJCX0gZWxzZSB7CisJCQkJZW1pdF9pbW0oUlZfUkVHX1QxLCBvZmYsIGN0eCk7CisJ CQkJZW1pdChydl9hZGQoUlZfUkVHX1QxLCBSVl9SRUdfVDEsIHJkKSwgY3R4KTsKKwkJCX0KKwor CQkJcmQgPSBSVl9SRUdfVDE7CisJCX0KKworCQllbWl0KEJQRl9TSVpFKGNvZGUpID09IEJQRl9X ID8KKwkJICAgICBydl9hbW9hZGRfdyhSVl9SRUdfWkVSTywgcnMsIHJkLCAwLCAwKSA6CisJCSAg ICAgcnZfYW1vYWRkX2QoUlZfUkVHX1pFUk8sIHJzLCByZCwgMCwgMCksIGN0eCk7CisJCWJyZWFr OworCWRlZmF1bHQ6CisJCXByX2VycigiYnBmLWppdDogdW5rbm93biBvcGNvZGUgJTAyeFxuIiwg Y29kZSk7CisJCXJldHVybiAtRUlOVkFMOworCX0KKworCXJldHVybiAwOworfQorCitzdGF0aWMg dm9pZCBidWlsZF9wcm9sb2d1ZShzdHJ1Y3QgcnZfaml0X2NvbnRleHQgKmN0eCkKK3sKKwlpbnQg c3RhY2tfYWRqdXN0ID0gMCwgc3RvcmVfb2Zmc2V0LCBicGZfc3RhY2tfYWRqdXN0OworCisJaWYg KHNlZW5fcmVnKFJWX1JFR19SQSwgY3R4KSkKKwkJc3RhY2tfYWRqdXN0ICs9IDg7CisJc3RhY2tf YWRqdXN0ICs9IDg7IC8qIFJWX1JFR19GUCAqLworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzEsIGN0 eCkpCisJCXN0YWNrX2FkanVzdCArPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzIsIGN0eCkp CisJCXN0YWNrX2FkanVzdCArPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzMsIGN0eCkpCisJ CXN0YWNrX2FkanVzdCArPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzQsIGN0eCkpCisJCXN0 YWNrX2FkanVzdCArPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzUsIGN0eCkpCisJCXN0YWNr X2FkanVzdCArPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzYsIGN0eCkpCisJCXN0YWNrX2Fk anVzdCArPSA4OworCisJc3RhY2tfYWRqdXN0ID0gcm91bmRfdXAoc3RhY2tfYWRqdXN0LCAxNik7 CisJYnBmX3N0YWNrX2FkanVzdCA9IHJvdW5kX3VwKGN0eC0+cHJvZy0+YXV4LT5zdGFja19kZXB0 
aCwgMTYpOworCXN0YWNrX2FkanVzdCArPSBicGZfc3RhY2tfYWRqdXN0OworCisJc3RvcmVfb2Zm c2V0ID0gc3RhY2tfYWRqdXN0IC0gODsKKworCWVtaXQocnZfYWRkaShSVl9SRUdfU1AsIFJWX1JF R19TUCwgLXN0YWNrX2FkanVzdCksIGN0eCk7CisKKwlpZiAoc2Vlbl9yZWcoUlZfUkVHX1JBLCBj dHgpKSB7CisJCWVtaXQocnZfc2QoUlZfUkVHX1NQLCBzdG9yZV9vZmZzZXQsIFJWX1JFR19SQSks IGN0eCk7CisJCXN0b3JlX29mZnNldCAtPSA4OworCX0KKwllbWl0KHJ2X3NkKFJWX1JFR19TUCwg c3RvcmVfb2Zmc2V0LCBSVl9SRUdfRlApLCBjdHgpOworCXN0b3JlX29mZnNldCAtPSA4OworCWlm IChzZWVuX3JlZyhSVl9SRUdfUzEsIGN0eCkpIHsKKwkJZW1pdChydl9zZChSVl9SRUdfU1AsIHN0 b3JlX29mZnNldCwgUlZfUkVHX1MxKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQor CWlmIChzZWVuX3JlZyhSVl9SRUdfUzIsIGN0eCkpIHsKKwkJZW1pdChydl9zZChSVl9SRUdfU1As IHN0b3JlX29mZnNldCwgUlZfUkVHX1MyKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJ fQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzMsIGN0eCkpIHsKKwkJZW1pdChydl9zZChSVl9SRUdf U1AsIHN0b3JlX29mZnNldCwgUlZfUkVHX1MzKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7 CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzQsIGN0eCkpIHsKKwkJZW1pdChydl9zZChSVl9S RUdfU1AsIHN0b3JlX29mZnNldCwgUlZfUkVHX1M0KSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09 IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzUsIGN0eCkpIHsKKwkJZW1pdChydl9zZChS Vl9SRUdfU1AsIHN0b3JlX29mZnNldCwgUlZfUkVHX1M1KSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0 IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzYsIGN0eCkpIHsKKwkJZW1pdChydl9z ZChSVl9SRUdfU1AsIHN0b3JlX29mZnNldCwgUlZfUkVHX1M2KSwgY3R4KTsKKwkJc3RvcmVfb2Zm c2V0IC09IDg7CisJfQorCisJZW1pdChydl9hZGRpKFJWX1JFR19GUCwgUlZfUkVHX1NQLCBzdGFj a19hZGp1c3QpLCBjdHgpOworCisJaWYgKGJwZl9zdGFja19hZGp1c3QpIHsKKwkJaWYgKCFzZWVu X3JlZyhSVl9SRUdfUzUsIGN0eCkpCisJCQlwcl93YXJuKCJicGYtaml0OiBub3Qgc2VlbiBCUEZf UkVHX0ZQLCBzdGFjayBpcyAlZFxuIiwKKwkJCQlicGZfc3RhY2tfYWRqdXN0KTsKKwkJZW1pdChy dl9hZGRpKFJWX1JFR19TNSwgUlZfUkVHX1NQLCBicGZfc3RhY2tfYWRqdXN0KSwgY3R4KTsKKwl9 CisKKwljdHgtPnN0YWNrX3NpemUgPSBzdGFja19hZGp1c3Q7Cit9CisKK3N0YXRpYyB2b2lkIGJ1 aWxkX2VwaWxvZ3VlKHN0cnVjdCBydl9qaXRfY29udGV4dCAqY3R4KQoreworCWludCBzdGFja19h ZGp1c3QgPSBjdHgtPnN0YWNrX3NpemUsIHN0b3JlX29mZnNldCA9IHN0YWNrX2FkanVzdCAtIDg7 CisKKwlpZiAoc2Vlbl9yZWcoUlZfUkVHX1JBLCBjdHgpKSB7CisJCWVtaXQocnZfbGQoUlZfUkVH X1JBLCBzdG9yZV9vZmZzZXQsIFJWX1JFR19TUCksIGN0eCk7CisJCXN0b3JlX29mZnNldCAtPSA4 OworCX0KKwllbWl0KHJ2X2xkKFJWX1JFR19GUCwgc3RvcmVfb2Zmc2V0LCBSVl9SRUdfU1ApLCBj dHgpOworCXN0b3JlX29mZnNldCAtPSA4OworCWlmIChzZWVuX3JlZyhSVl9SRUdfUzEsIGN0eCkp IHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzEsIHN0b3JlX29mZnNldCwgUlZfUkVHX1NQKSwgY3R4 KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzIsIGN0 eCkpIHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzIsIHN0b3JlX29mZnNldCwgUlZfUkVHX1NQKSwg Y3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdfUzMs IGN0eCkpIHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzMsIHN0b3JlX29mZnNldCwgUlZfUkVHX1NQ KSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9SRUdf UzQsIGN0eCkpIHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzQsIHN0b3JlX29mZnNldCwgUlZfUkVH X1NQKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhSVl9S RUdfUzUsIGN0eCkpIHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzUsIHN0b3JlX29mZnNldCwgUlZf UkVHX1NQKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCWlmIChzZWVuX3JlZyhS Vl9SRUdfUzYsIGN0eCkpIHsKKwkJZW1pdChydl9sZChSVl9SRUdfUzYsIHN0b3JlX29mZnNldCwg UlZfUkVHX1NQKSwgY3R4KTsKKwkJc3RvcmVfb2Zmc2V0IC09IDg7CisJfQorCisJZW1pdChydl9h ZGRpKFJWX1JFR19TUCwgUlZfUkVHX1NQLCBzdGFja19hZGp1c3QpLCBjdHgpOworCS8qIFNldCBy ZXR1cm4gdmFsdWUuICovCisJZW1pdChydl9hZGRpKFJWX1JFR19BMCwgUlZfUkVHX0E1LCAwKSwg Y3R4KTsKKwllbWl0KHJ2X2phbHIoUlZfUkVHX1pFUk8sIFJWX1JFR19SQSwgMCksIGN0eCk7Cit9 CisKK3N0YXRpYyBpbnQgYnVpbGRfYm9keShzdHJ1Y3QgcnZfaml0X2NvbnRleHQgKmN0eCwgYm9v 
bCBleHRyYV9wYXNzKQoreworCWNvbnN0IHN0cnVjdCBicGZfcHJvZyAqcHJvZyA9IGN0eC0+cHJv ZzsKKwlpbnQgaTsKKworCWZvciAoaSA9IDA7IGkgPCBwcm9nLT5sZW47IGkrKykgeworCQljb25z dCBzdHJ1Y3QgYnBmX2luc24gKmluc24gPSAmcHJvZy0+aW5zbnNpW2ldOworCQlpbnQgcmV0Owor CisJCXJldCA9IGVtaXRfaW5zbihpbnNuLCBjdHgsIGV4dHJhX3Bhc3MpOworCQlpZiAocmV0ID4g MCkgeworCQkJaSsrOworCQkJaWYgKGN0eC0+aW5zbnMgPT0gTlVMTCkKKwkJCQljdHgtPm9mZnNl dFtpXSA9IGN0eC0+bmluc25zOworCQkJY29udGludWU7CisJCX0KKwkJaWYgKGN0eC0+aW5zbnMg PT0gTlVMTCkKKwkJCWN0eC0+b2Zmc2V0W2ldID0gY3R4LT5uaW5zbnM7CisJCWlmIChyZXQpCisJ CQlyZXR1cm4gcmV0OworCX0KKwlyZXR1cm4gMDsKK30KKworc3RhdGljIHZvaWQgYnBmX2ZpbGxf aWxsX2luc25zKHZvaWQgKmFyZWEsIHVuc2lnbmVkIGludCBzaXplKQoreworCW1lbXNldChhcmVh LCAwLCBzaXplKTsKK30KKworc3RhdGljIHZvaWQgYnBmX2ZsdXNoX2ljYWNoZSh2b2lkICpzdGFy dCwgdm9pZCAqZW5kKQoreworCWZsdXNoX2ljYWNoZV9yYW5nZSgodW5zaWduZWQgbG9uZylzdGFy dCwgKHVuc2lnbmVkIGxvbmcpZW5kKTsKK30KKwogc3RydWN0IGJwZl9wcm9nICpicGZfaW50X2pp dF9jb21waWxlKHN0cnVjdCBicGZfcHJvZyAqcHJvZykKIHsKKwlib29sIHRtcF9ibGluZGVkID0g ZmFsc2UsIGV4dHJhX3Bhc3MgPSBmYWxzZTsKKwlzdHJ1Y3QgYnBmX3Byb2cgKnRtcCwgKm9yaWdf cHJvZyA9IHByb2c7CisJc3RydWN0IHJ2X2ppdF9kYXRhICpqaXRfZGF0YTsKKwlzdHJ1Y3QgcnZf aml0X2NvbnRleHQgKmN0eDsKKwl1bnNpZ25lZCBpbnQgaW1hZ2Vfc2l6ZTsKKworCWlmICghcHJv Zy0+aml0X3JlcXVlc3RlZCkKKwkJcmV0dXJuIG9yaWdfcHJvZzsKKworCXRtcCA9IGJwZl9qaXRf YmxpbmRfY29uc3RhbnRzKHByb2cpOworCWlmIChJU19FUlIodG1wKSkKKwkJcmV0dXJuIG9yaWdf cHJvZzsKKwlpZiAodG1wICE9IHByb2cpIHsKKwkJdG1wX2JsaW5kZWQgPSB0cnVlOworCQlwcm9n ID0gdG1wOworCX0KKworCWppdF9kYXRhID0gcHJvZy0+YXV4LT5qaXRfZGF0YTsKKwlpZiAoIWpp dF9kYXRhKSB7CisJCWppdF9kYXRhID0ga3phbGxvYyhzaXplb2YoKmppdF9kYXRhKSwgR0ZQX0tF Uk5FTCk7CisJCWlmICghaml0X2RhdGEpIHsKKwkJCXByb2cgPSBvcmlnX3Byb2c7CisJCQlnb3Rv IG91dDsKKwkJfQorCQlwcm9nLT5hdXgtPmppdF9kYXRhID0gaml0X2RhdGE7CisJfQorCisJY3R4 ID0gJmppdF9kYXRhLT5jdHg7CisKKwlpZiAoY3R4LT5vZmZzZXQpIHsKKwkJZXh0cmFfcGFzcyA9 IHRydWU7CisJCWltYWdlX3NpemUgPSBzaXplb2YodTMyKSAqIGN0eC0+bmluc25zOworCQlnb3Rv IHNraXBfaW5pdF9jdHg7CisJfQorCisJY3R4LT5wcm9nID0gcHJvZzsKKwljdHgtPm9mZnNldCA9 IGtjYWxsb2MocHJvZy0+bGVuLCBzaXplb2YoaW50KSwgR0ZQX0tFUk5FTCk7CisJaWYgKCFjdHgt Pm9mZnNldCkgeworCQlwcm9nID0gb3JpZ19wcm9nOworCQlnb3RvIG91dF9vZmZzZXQ7CisJfQor CisJLyogRmlyc3QgcGFzcyBnZW5lcmF0ZXMgdGhlIGN0eC0+b2Zmc2V0LCBidXQgZG9lcyBub3Qg ZW1pdCBhbiBpbWFnZS4gKi8KKwlpZiAoYnVpbGRfYm9keShjdHgsIGV4dHJhX3Bhc3MpKSB7CisJ CXByb2cgPSBvcmlnX3Byb2c7CisJCWdvdG8gb3V0X29mZnNldDsKKwl9CisJYnVpbGRfcHJvbG9n dWUoY3R4KTsKKwljdHgtPmVwaWxvZ3VlX29mZnNldCA9IGN0eC0+bmluc25zOworCWJ1aWxkX2Vw aWxvZ3VlKGN0eCk7CisKKwkvKiBBbGxvY2F0ZSBpbWFnZSwgbm93IHRoYXQgd2Uga25vdyB0aGUg c2l6ZS4gKi8KKwlpbWFnZV9zaXplID0gc2l6ZW9mKHUzMikgKiBjdHgtPm5pbnNuczsKKwlqaXRf ZGF0YS0+aGVhZGVyID0gYnBmX2ppdF9iaW5hcnlfYWxsb2MoaW1hZ2Vfc2l6ZSwgJmppdF9kYXRh LT5pbWFnZSwKKwkJCQkJCXNpemVvZih1MzIpLAorCQkJCQkJYnBmX2ZpbGxfaWxsX2luc25zKTsK KwlpZiAoIWppdF9kYXRhLT5oZWFkZXIpIHsKKwkJcHJvZyA9IG9yaWdfcHJvZzsKKwkJZ290byBv dXRfb2Zmc2V0OworCX0KKworCS8qIFNlY29uZCwgcmVhbCBwYXNzLCB0aGF0IGFjdXRhbGx5IGVt aXRzIHRoZSBpbWFnZS4gKi8KKwljdHgtPmluc25zID0gKHUzMiAqKWppdF9kYXRhLT5pbWFnZTsK K3NraXBfaW5pdF9jdHg6CisJY3R4LT5uaW5zbnMgPSAwOworCisJYnVpbGRfcHJvbG9ndWUoY3R4 KTsKKwlpZiAoYnVpbGRfYm9keShjdHgsIGV4dHJhX3Bhc3MpKSB7CisJCWJwZl9qaXRfYmluYXJ5 X2ZyZWUoaml0X2RhdGEtPmhlYWRlcik7CisJCXByb2cgPSBvcmlnX3Byb2c7CisJCWdvdG8gb3V0 X29mZnNldDsKKwl9CisJYnVpbGRfZXBpbG9ndWUoY3R4KTsKKworCWlmIChicGZfaml0X2VuYWJs ZSA+IDEpCisJCWJwZl9qaXRfZHVtcChwcm9nLT5sZW4sIGltYWdlX3NpemUsIDIsIGN0eC0+aW5z bnMpOworCisJcHJvZy0+YnBmX2Z1bmMgPSAodm9pZCAqKWN0eC0+aW5zbnM7CisJcHJvZy0+aml0 ZWQgPSAxOworCXByb2ctPmppdGVkX2xlbiA9IGltYWdlX3NpemU7CisKKwlicGZfZmx1c2hfaWNh 
Y2hlKGppdF9kYXRhLT5oZWFkZXIsICh1OCAqKWN0eC0+aW5zbnMgKyBjdHgtPm5pbnNucyk7CisK KwlpZiAoIXByb2ctPmlzX2Z1bmMgfHwgZXh0cmFfcGFzcykgeworb3V0X29mZnNldDoKKwkJa2Zy ZWUoY3R4LT5vZmZzZXQpOworCQlrZnJlZShqaXRfZGF0YSk7CisJCXByb2ctPmF1eC0+aml0X2Rh dGEgPSBOVUxMOworCX0KK291dDoKKwlpZiAodG1wX2JsaW5kZWQpCisJCWJwZl9qaXRfcHJvZ19y ZWxlYXNlX290aGVyKHByb2csIHByb2cgPT0gb3JpZ19wcm9nID8KKwkJCQkJICAgdG1wIDogb3Jp Z19wcm9nKTsKIAlyZXR1cm4gcHJvZzsKIH0KLS0gCjIuMTkuMQoKCl9fX19fX19fX19fX19fX19f X19fX19fX19fX19fX19fX19fX19fX19fX19fX19fCmxpbnV4LXJpc2N2IG1haWxpbmcgbGlzdAps aW51eC1yaXNjdkBsaXN0cy5pbmZyYWRlYWQub3JnCmh0dHA6Ly9saXN0cy5pbmZyYWRlYWQub3Jn L21haWxtYW4vbGlzdGluZm8vbGludXgtcmlzY3YK