return BPF_CLASS(insn->code) == BPF_ST && BPF_MODE(insn->code) == BPF_MEM;
 }
 
+/* Return the number of bits needed to represent the register's maximum
+ * possible unsigned value: fls64() yields the 1-based position of the
+ * highest set bit of umax_value (0 when umax_value is 0).  Callers use
+ * this to detect narrowing spills (width > BITS_PER_BYTE * size) and to
+ * test whether a scalar fits in 32 bits (width <= 32, i.e. umax <= U32_MAX).
+ */
+static int get_reg_width(struct bpf_reg_state *reg)
+{
+       return fls64(reg->umax_value);
+}
+
 /* check_stack_{read,write}_fixed_off functions track spill/fill of registers,
  * stack boundary and alignment are checked in check_mem_access()
  */
        if (reg && !(off % BPF_REG_SIZE) && register_is_bounded(reg) && env->bpf_capable) {
                save_register_state(env, state, spi, reg, size);
                /* Break the relation on a narrowing spill. */
-               if (fls64(reg->umax_value) > BITS_PER_BYTE * size)
+               if (get_reg_width(reg) > BITS_PER_BYTE * size)
                        state->stack[spi].spilled_ptr.id = 0;
        } else if (!reg && !(off % BPF_REG_SIZE) && is_bpf_st_mem(insn) &&
                   insn->imm != 0 && env->bpf_capable) {
                                        return -EACCES;
                                } else if (src_reg->type == SCALAR_VALUE) {
                                        if (insn->off == 0) {
-                                               bool is_src_reg_u32 = src_reg->umax_value <= U32_MAX;
+                                               bool is_src_reg_u32 = get_reg_width(src_reg) <= 32;
 
                                                if (is_src_reg_u32)
                                                        assign_scalar_id_before_mov(env, src_reg);