return stack->slot_type[BPF_REG_SIZE - 1] == STACK_SPILL;
 }
 
+/* true if the stack slot holds a spilled register of SCALAR_VALUE type */
+static bool is_spilled_scalar_reg(const struct bpf_stack_state *stack)
+{
+       return stack->slot_type[BPF_REG_SIZE - 1] == STACK_SPILL &&
+              stack->spilled_ptr.type == SCALAR_VALUE;
+}
+
 static void scrub_spilled_slot(u8 *stype)
 {
        if (*stype != STACK_INVALID)
        return btf_name_by_offset(desc_btf, func->name_off);
 }
 
+/* Start a new backtracking session from the given call frame.
+ * NOTE(review): this does not clear reg_masks/stack_masks — it appears to
+ * rely on bt being left empty by the previous session (or zeroed at env
+ * allocation); confirm at call sites before reusing bt.
+ */
+static inline void bt_init(struct backtrack_state *bt, u32 frame)
+{
+       bt->frame = frame;
+}
+
+/* Wipe all backtracking state, preserving only the back-pointer to env */
+static inline void bt_reset(struct backtrack_state *bt)
+{
+       struct bpf_verifier_env *env = bt->env;
+
+       memset(bt, 0, sizeof(*bt));
+       bt->env = env;
+}
+
+/* Return true if no register or stack slot needs precision marking in any
+ * frame from 0 up to and including the current one.
+ * Returns bool rather than u32: this is a yes/no predicate, matching the
+ * sibling predicates bt_is_reg_set()/bt_is_slot_set(); callers only use it
+ * in boolean context, so the change is backward-compatible.
+ */
+static inline bool bt_empty(struct backtrack_state *bt)
+{
+       u64 mask = 0;
+       int i;
+
+       /* OR all per-frame masks together; any set bit means work remains */
+       for (i = 0; i <= bt->frame; i++)
+               mask |= bt->reg_masks[i] | bt->stack_masks[i];
+
+       return mask == 0;
+}
+
+/* Record that backtracking crossed into a subprogram: move one frame
+ * deeper. Returns -EFAULT (with a verifier-bug warning) if we are already
+ * at the deepest allowed frame (MAX_CALL_FRAMES - 1).
+ */
+static inline int bt_subprog_enter(struct backtrack_state *bt)
+{
+       if (bt->frame == MAX_CALL_FRAMES - 1) {
+               verbose(bt->env, "BUG subprog enter from frame %d\n", bt->frame);
+               WARN_ONCE(1, "verifier backtracking bug");
+               return -EFAULT;
+       }
+       bt->frame++;
+       return 0;
+}
+
+/* Record that backtracking returned to the caller: move one frame up.
+ * Returns -EFAULT (with a verifier-bug warning) on an exit attempt from
+ * the outermost frame (frame 0).
+ */
+static inline int bt_subprog_exit(struct backtrack_state *bt)
+{
+       if (bt->frame == 0) {
+               verbose(bt->env, "BUG subprog exit from frame 0\n");
+               WARN_ONCE(1, "verifier backtracking bug");
+               return -EFAULT;
+       }
+       bt->frame--;
+       return 0;
+}
+
+/* Mark register 'reg' in frame 'frame' as needing precision.
+ * Use an unsigned literal for the shift: '1 << reg' shifts a signed int,
+ * which is inconsistent with the '1ull << slot' used by the stack-slot
+ * helpers and would be undefined behavior if reg ever reached bit 31.
+ */
+static inline void bt_set_frame_reg(struct backtrack_state *bt, u32 frame, u32 reg)
+{
+       bt->reg_masks[frame] |= 1u << reg;
+}
+
+/* Drop the precision mark for register 'reg' in frame 'frame' */
+static inline void bt_clear_frame_reg(struct backtrack_state *bt, u32 frame, u32 reg)
+{
+       bt->reg_masks[frame] &= ~(1u << reg);
+}
+
+/* Mark register 'reg' in the current frame as needing precision */
+static inline void bt_set_reg(struct backtrack_state *bt, u32 reg)
+{
+       bt_set_frame_reg(bt, bt->frame, reg);
+}
+
+/* Drop the precision mark for register 'reg' in the current frame */
+static inline void bt_clear_reg(struct backtrack_state *bt, u32 reg)
+{
+       bt_clear_frame_reg(bt, bt->frame, reg);
+}
+
+/* Mark stack slot 'slot' in frame 'frame' as needing precision.
+ * stack_masks are 64-bit, hence the 1ull literal.
+ */
+static inline void bt_set_frame_slot(struct backtrack_state *bt, u32 frame, u32 slot)
+{
+       bt->stack_masks[frame] |= 1ull << slot;
+}
+
+/* Drop the precision mark for stack slot 'slot' in frame 'frame' */
+static inline void bt_clear_frame_slot(struct backtrack_state *bt, u32 frame, u32 slot)
+{
+       bt->stack_masks[frame] &= ~(1ull << slot);
+}
+
+/* Mark stack slot 'slot' in the current frame as needing precision */
+static inline void bt_set_slot(struct backtrack_state *bt, u32 slot)
+{
+       bt_set_frame_slot(bt, bt->frame, slot);
+}
+
+/* Drop the precision mark for stack slot 'slot' in the current frame */
+static inline void bt_clear_slot(struct backtrack_state *bt, u32 slot)
+{
+       bt_clear_frame_slot(bt, bt->frame, slot);
+}
+
+/* Bitmask of registers needing precision in frame 'frame' */
+static inline u32 bt_frame_reg_mask(struct backtrack_state *bt, u32 frame)
+{
+       return bt->reg_masks[frame];
+}
+
+/* Bitmask of registers needing precision in the current frame */
+static inline u32 bt_reg_mask(struct backtrack_state *bt)
+{
+       return bt->reg_masks[bt->frame];
+}
+
+/* Bitmask of stack slots needing precision in frame 'frame' */
+static inline u64 bt_frame_stack_mask(struct backtrack_state *bt, u32 frame)
+{
+       return bt->stack_masks[frame];
+}
+
+/* Bitmask of stack slots needing precision in the current frame */
+static inline u64 bt_stack_mask(struct backtrack_state *bt)
+{
+       return bt->stack_masks[bt->frame];
+}
+
+/* true if register 'reg' is marked for precision in the current frame.
+ * 1u literal: shifting a signed int ('1 << reg') would be inconsistent
+ * with the 64-bit slot helper below and UB if reg ever reached bit 31.
+ */
+static inline bool bt_is_reg_set(struct backtrack_state *bt, u32 reg)
+{
+       return bt->reg_masks[bt->frame] & (1u << reg);
+}
+
+/* true if stack slot 'slot' is marked for precision in the current frame */
+static inline bool bt_is_slot_set(struct backtrack_state *bt, u32 slot)
+{
+       return bt->stack_masks[bt->frame] & (1ull << slot);
+}
+
 /* For given verifier state backtrack_insn() is called from the last insn to
  * the first insn. Its purpose is to compute a bitmask of registers and
  * stack slots that needs precision in the parent verifier state.
  */
 static int backtrack_insn(struct bpf_verifier_env *env, int idx,
-                         u32 *reg_mask, u64 *stack_mask)
+                         struct backtrack_state *bt)
 {
        const struct bpf_insn_cbs cbs = {
                .cb_call        = disasm_kfunc_name,
        u8 class = BPF_CLASS(insn->code);
        u8 opcode = BPF_OP(insn->code);
        u8 mode = BPF_MODE(insn->code);
-       u32 dreg = 1u << insn->dst_reg;
-       u32 sreg = 1u << insn->src_reg;
+       u32 dreg = insn->dst_reg;
+       u32 sreg = insn->src_reg;
        u32 spi;
 
        if (insn->code == 0)
                return 0;
        if (env->log.level & BPF_LOG_LEVEL2) {
-               verbose(env, "regs=%x stack=%llx before ", *reg_mask, *stack_mask);
+               verbose(env, "regs=%x stack=%llx before ", bt_reg_mask(bt), bt_stack_mask(bt));
                verbose(env, "%d: ", idx);
                print_bpf_insn(&cbs, insn, env->allow_ptr_leaks);
        }
 
        if (class == BPF_ALU || class == BPF_ALU64) {
-               if (!(*reg_mask & dreg))
+               if (!bt_is_reg_set(bt, dreg))
                        return 0;
                if (opcode == BPF_MOV) {
                        if (BPF_SRC(insn->code) == BPF_X) {
                                 * dreg needs precision after this insn
                                 * sreg needs precision before this insn
                                 */
-                               *reg_mask &= ~dreg;
-                               *reg_mask |= sreg;
+                               bt_clear_reg(bt, dreg);
+                               bt_set_reg(bt, sreg);
                        } else {
                                /* dreg = K
                                 * dreg needs precision after this insn.
                                 * as precise=true in this verifier state.
                                 * No further markings in parent are necessary
                                 */
-                               *reg_mask &= ~dreg;
+                               bt_clear_reg(bt, dreg);
                        }
                } else {
                        if (BPF_SRC(insn->code) == BPF_X) {
                                 * both dreg and sreg need precision
                                 * before this insn
                                 */
-                               *reg_mask |= sreg;
+                               bt_set_reg(bt, sreg);
                        } /* else dreg += K
                           * dreg still needs precision before this insn
                           */
                }
        } else if (class == BPF_LDX) {
-               if (!(*reg_mask & dreg))
+               if (!bt_is_reg_set(bt, dreg))
                        return 0;
-               *reg_mask &= ~dreg;
+               bt_clear_reg(bt, dreg);
 
                /* scalars can only be spilled into stack w/o losing precision.
                 * Load from any other memory can be zero extended.
                        WARN_ONCE(1, "verifier backtracking bug");
                        return -EFAULT;
                }
-               *stack_mask |= 1ull << spi;
+               bt_set_slot(bt, spi);
        } else if (class == BPF_STX || class == BPF_ST) {
-               if (*reg_mask & dreg)
+               if (bt_is_reg_set(bt, dreg))
                        /* stx & st shouldn't be using _scalar_ dst_reg
                         * to access memory. It means backtracking
                         * encountered a case of pointer subtraction.
                        WARN_ONCE(1, "verifier backtracking bug");
                        return -EFAULT;
                }
-               if (!(*stack_mask & (1ull << spi)))
+               if (!bt_is_slot_set(bt, spi))
                        return 0;
-               *stack_mask &= ~(1ull << spi);
+               bt_clear_slot(bt, spi);
                if (class == BPF_STX)
-                       *reg_mask |= sreg;
+                       bt_set_reg(bt, sreg);
        } else if (class == BPF_JMP || class == BPF_JMP32) {
                if (opcode == BPF_CALL) {
                        if (insn->src_reg == BPF_PSEUDO_CALL)
                        if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && insn->imm == 0)
                                return -ENOTSUPP;
                        /* regular helper call sets R0 */
-                       *reg_mask &= ~1;
-                       if (*reg_mask & 0x3f) {
+                       bt_clear_reg(bt, BPF_REG_0);
+                       if (bt_reg_mask(bt) & BPF_REGMASK_ARGS) {
                                /* if backtracing was looking for registers R1-R5
                                 * they should have been found already.
                                 */
-                               verbose(env, "BUG regs %x\n", *reg_mask);
+                               verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
                                WARN_ONCE(1, "verifier backtracking bug");
                                return -EFAULT;
                        }
                } else if (opcode == BPF_EXIT) {
                        return -ENOTSUPP;
                } else if (BPF_SRC(insn->code) == BPF_X) {
-                       if (!(*reg_mask & (dreg | sreg)))
+                       if (!bt_is_reg_set(bt, dreg) && !bt_is_reg_set(bt, sreg))
                                return 0;
                        /* dreg <cond> sreg
                         * Both dreg and sreg need precision before
                         * before it would be equally necessary to
                         * propagate it to dreg.
                         */
-                       *reg_mask |= (sreg | dreg);
+                       bt_set_reg(bt, dreg);
+                       bt_set_reg(bt, sreg);
                         /* else dreg <cond> K
                          * Only dreg still needs precision before
                          * this insn, so for the K-based conditional
                          */
                }
        } else if (class == BPF_LD) {
-               if (!(*reg_mask & dreg))
+               if (!bt_is_reg_set(bt, dreg))
                        return 0;
-               *reg_mask &= ~dreg;
+               bt_clear_reg(bt, dreg);
                /* It's ld_imm64 or ld_abs or ld_ind.
                 * For ld_imm64 no further tracking of precision
                 * into parent is necessary
 static int __mark_chain_precision(struct bpf_verifier_env *env, int frame, int regno,
                                  int spi)
 {
+       struct backtrack_state *bt = &env->bt;
        struct bpf_verifier_state *st = env->cur_state;
        int first_idx = st->first_insn_idx;
        int last_idx = env->insn_idx;
        struct bpf_func_state *func;
        struct bpf_reg_state *reg;
-       u32 reg_mask = regno >= 0 ? 1u << regno : 0;
-       u64 stack_mask = spi >= 0 ? 1ull << spi : 0;
        bool skip_first = true;
-       bool new_marks = false;
        int i, err;
 
        if (!env->bpf_capable)
                return 0;
 
+       /* set frame number from which we are starting to backtrack */
+       bt_init(bt, frame);
+
        /* Do sanity checks against current state of register and/or stack
         * slot, but don't set precise flag in current state, as precision
         * tracking in the current state is unnecessary.
                        WARN_ONCE(1, "backtracing misuse");
                        return -EFAULT;
                }
-               new_marks = true;
+               bt_set_reg(bt, regno);
        }
 
        while (spi >= 0) {
-               if (!is_spilled_reg(&func->stack[spi])) {
-                       stack_mask = 0;
+               if (!is_spilled_scalar_reg(&func->stack[spi]))
                        break;
-               }
-               reg = &func->stack[spi].spilled_ptr;
-               if (reg->type != SCALAR_VALUE) {
-                       stack_mask = 0;
-                       break;
-               }
-               new_marks = true;
+               bt_set_slot(bt, spi);
                break;
        }
 
-       if (!new_marks)
-               return 0;
-       if (!reg_mask && !stack_mask)
+       if (bt_empty(bt))
                return 0;
 
        for (;;) {
                        if (st->curframe == 0 &&
                            st->frame[0]->subprogno > 0 &&
                            st->frame[0]->callsite == BPF_MAIN_FUNC &&
-                           stack_mask == 0 && (reg_mask & ~0x3e) == 0) {
-                               bitmap_from_u64(mask, reg_mask);
+                           bt_stack_mask(bt) == 0 &&
+                           (bt_reg_mask(bt) & ~BPF_REGMASK_ARGS) == 0) {
+                               bitmap_from_u64(mask, bt_reg_mask(bt));
                                for_each_set_bit(i, mask, 32) {
                                        reg = &st->frame[0]->regs[i];
                                        if (reg->type != SCALAR_VALUE) {
-                                               reg_mask &= ~(1u << i);
+                                               bt_clear_reg(bt, i);
                                                continue;
                                        }
                                        reg->precise = true;
                                return 0;
                        }
 
-                       verbose(env, "BUG backtracing func entry subprog %d reg_mask %x stack_mask %llx\n",
-                               st->frame[0]->subprogno, reg_mask, stack_mask);
+                       verbose(env, "BUG backtracking func entry subprog %d reg_mask %x stack_mask %llx\n",
+                               st->frame[0]->subprogno, bt_reg_mask(bt), bt_stack_mask(bt));
                        WARN_ONCE(1, "verifier backtracking bug");
                        return -EFAULT;
                }
                                err = 0;
                                skip_first = false;
                        } else {
-                               err = backtrack_insn(env, i, &reg_mask, &stack_mask);
+                               err = backtrack_insn(env, i, bt);
                        }
                        if (err == -ENOTSUPP) {
                                mark_all_scalars_precise(env, st);
+                               bt_reset(bt);
                                return 0;
                        } else if (err) {
                                return err;
                        }
-                       if (!reg_mask && !stack_mask)
+                       if (bt_empty(bt))
                                /* Found assignment(s) into tracked register in this state.
                                 * Since this state is already marked, just return.
                                 * Nothing to be tracked further in the parent state.
                if (!st)
                        break;
 
-               new_marks = false;
                func = st->frame[frame];
-               bitmap_from_u64(mask, reg_mask);
+               bitmap_from_u64(mask, bt_reg_mask(bt));
                for_each_set_bit(i, mask, 32) {
                        reg = &func->regs[i];
                        if (reg->type != SCALAR_VALUE) {
-                               reg_mask &= ~(1u << i);
+                               bt_clear_reg(bt, i);
                                continue;
                        }
-                       if (!reg->precise)
-                               new_marks = true;
-                       reg->precise = true;
+                       if (reg->precise)
+                               bt_clear_reg(bt, i);
+                       else
+                               reg->precise = true;
                }
 
-               bitmap_from_u64(mask, stack_mask);
+               bitmap_from_u64(mask, bt_stack_mask(bt));
                for_each_set_bit(i, mask, 64) {
                        if (i >= func->allocated_stack / BPF_REG_SIZE) {
                                /* the sequence of instructions:
                                 * In such case fallback to conservative.
                                 */
                                mark_all_scalars_precise(env, st);
+                               bt_reset(bt);
                                return 0;
                        }
 
-                       if (!is_spilled_reg(&func->stack[i])) {
-                               stack_mask &= ~(1ull << i);
+                       if (!is_spilled_scalar_reg(&func->stack[i])) {
+                               bt_clear_slot(bt, i);
                                continue;
                        }
                        reg = &func->stack[i].spilled_ptr;
-                       if (reg->type != SCALAR_VALUE) {
-                               stack_mask &= ~(1ull << i);
-                               continue;
-                       }
-                       if (!reg->precise)
-                               new_marks = true;
-                       reg->precise = true;
+                       if (reg->precise)
+                               bt_clear_slot(bt, i);
+                       else
+                               reg->precise = true;
                }
                if (env->log.level & BPF_LOG_LEVEL2) {
                        verbose(env, "parent %s regs=%x stack=%llx marks:",
-                               new_marks ? "didn't have" : "already had",
-                               reg_mask, stack_mask);
+                               !bt_empty(bt) ? "didn't have" : "already had",
+                               bt_reg_mask(bt), bt_stack_mask(bt));
                        print_verifier_state(env, func, true);
                }
 
-               if (!reg_mask && !stack_mask)
-                       break;
-               if (!new_marks)
+               if (bt_empty(bt))
                        break;
 
                last_idx = st->last_insn_idx;
        if (!env)
                return -ENOMEM;
 
+       env->bt.env = env;
+
        len = (*prog)->len;
        env->insn_aux_data =
                vzalloc(array_size(sizeof(struct bpf_insn_aux_data), len));