        struct work_struct         recover_work;
        struct mlx5e_ptpsq        *ptpsq;
        cqe_ts_to_ns               ptp_cyc2time;
-       u16                        max_sq_wqebbs;
 } ____cacheline_aligned_in_smp;
 
 struct mlx5e_dma_info {
        /* control path */
        struct mlx5_wq_ctrl        wq_ctrl;
        struct mlx5e_channel      *channel;
-       u16                        max_sq_wqebbs;
 } ____cacheline_aligned_in_smp;
 
 struct mlx5e_ktls_resync_resp;
 
                is_redirect ?
                        &c->priv->channel_stats[c->ix]->xdpsq :
                        &c->priv->channel_stats[c->ix]->rq_xdpsq;
-       sq->max_sq_wqebbs = mlx5e_get_max_sq_wqebbs(mdev);
-       sq->stop_room = MLX5E_STOP_ROOM(sq->max_sq_wqebbs);
-       sq->max_sq_mpw_wqebbs = mlx5e_get_sw_max_sq_mpw_wqebbs(sq->max_sq_wqebbs);
+       sq->stop_room = MLX5E_STOP_ROOM(mlx5e_get_max_sq_wqebbs(mdev));
+       sq->max_sq_mpw_wqebbs =
+               mlx5e_get_sw_max_sq_mpw_wqebbs(mlx5e_get_max_sq_wqebbs(mdev));
 
        param->wq.db_numa_node = cpu_to_node(c->cpu);
        err = mlx5_wq_cyc_create(mdev, &param->wq, sqc_wq, wq, &sq->wq_ctrl);
        sq->uar_map   = mdev->mlx5e_res.hw_objs.bfreg.map;
        sq->min_inline_mode = params->tx_min_inline_mode;
        sq->hw_mtu    = MLX5E_SW2HW_MTU(params, params->sw_mtu);
-       sq->max_sq_wqebbs = mlx5e_get_max_sq_wqebbs(mdev);
-       sq->max_sq_mpw_wqebbs = mlx5e_get_sw_max_sq_mpw_wqebbs(sq->max_sq_wqebbs);
+       sq->max_sq_mpw_wqebbs =
+               mlx5e_get_sw_max_sq_mpw_wqebbs(mlx5e_get_max_sq_wqebbs(mdev));
        INIT_WORK(&sq->recover_work, mlx5e_tx_err_cqe_work);
        if (!MLX5_CAP_ETH(mdev, wqe_vlan_insert))
                set_bit(MLX5E_SQ_STATE_VLAN_NEED_L2_INLINE, &sq->state);