        ena = 0;
        if ((bp->flags & BNXT_FLAG_ROCE_CAP) && !is_kdump_kernel()) {
                pg_lvl = 2;
-               extra_qps = min_t(u32, 65536, max_qps - l2_qps - qp1_qps);
-               /* allocate extra qps if fw supports RoCE fast qp destroy feature */
-               extra_qps += fast_qpmd_qps;
-               extra_srqs = min_t(u32, 8192, max_srqs - srqs);
+               if (BNXT_SW_RES_LMT(bp)) {
+                       extra_qps = max_qps - l2_qps - qp1_qps;
+                       extra_srqs = max_srqs - srqs;
+               } else {
+                       extra_qps = min_t(u32, 65536,
+                                         max_qps - l2_qps - qp1_qps);
+                       /* allocate extra qps if fw supports RoCE fast qp
+                        * destroy feature
+                        */
+                       extra_qps += fast_qpmd_qps;
+                       extra_srqs = min_t(u32, 8192, max_srqs - srqs);
+               }
                if (fast_qpmd_qps)
                        ena |= FUNC_BACKING_STORE_CFG_REQ_ENABLES_QP_FAST_QPMD;
        }
@@ ... @@ static int bnxt_alloc_ctx_mem(struct bnxt *bp)
                goto skip_rdma;
 
        ctxm = &ctx->ctx_arr[BNXT_CTX_MRAV];
-       /* 128K extra is needed to accommodate static AH context
-        * allocation by f/w.
-        */
-       num_mr = min_t(u32, ctxm->max_entries / 2, 1024 * 256);
-       num_ah = min_t(u32, num_mr, 1024 * 128);
-       ctxm->split_entry_cnt = BNXT_CTX_MRAV_AV_SPLIT_ENTRY + 1;
-       if (!ctxm->mrav_av_entries || ctxm->mrav_av_entries > num_ah)
-               ctxm->mrav_av_entries = num_ah;
+       if (BNXT_SW_RES_LMT(bp) &&
+           ctxm->split_entry_cnt == BNXT_CTX_MRAV_AV_SPLIT_ENTRY + 1) {
+               num_ah = ctxm->mrav_av_entries;
+               num_mr = ctxm->max_entries - num_ah;
+       } else {
+               /* 128K extra is needed to accommodate static AH context
+                * allocation by f/w.
+                */
+               num_mr = min_t(u32, ctxm->max_entries / 2, 1024 * 256);
+               num_ah = min_t(u32, num_mr, 1024 * 128);
+               ctxm->split_entry_cnt = BNXT_CTX_MRAV_AV_SPLIT_ENTRY + 1;
+               if (!ctxm->mrav_av_entries || ctxm->mrav_av_entries > num_ah)
+                       ctxm->mrav_av_entries = num_ah;
+       }
 
        rc = bnxt_setup_ctxm_pg_tbls(bp, ctxm, num_mr + num_ah, 2);
        if (rc)
@@ ... @@ static int __bnxt_hwrm_func_qcaps(struct bnxt *bp)
                bp->flags |= BNXT_FLAG_UDP_GSO_CAP;
        if (flags_ext2 & FUNC_QCAPS_RESP_FLAGS_EXT2_TX_PKT_TS_CMPL_SUPPORTED)
                bp->fw_cap |= BNXT_FW_CAP_TX_TS_CMP;
+       if (flags_ext2 &
+           FUNC_QCAPS_RESP_FLAGS_EXT2_SW_MAX_RESOURCE_LIMITS_SUPPORTED)
+               bp->fw_cap |= BNXT_FW_CAP_SW_MAX_RESOURCE_LIMITS;
        if (BNXT_PF(bp) &&
            (flags_ext2 & FUNC_QCAPS_RESP_FLAGS_EXT2_ROCE_VF_RESOURCE_MGMT_SUPPORTED))
                bp->fw_cap |= BNXT_FW_CAP_ROCE_VF_RESC_MGMT_SUPPORTED;
 
--- a/drivers/net/ethernet/broadcom/bnxt/bnxt.h
+++ b/drivers/net/ethernet/broadcom/bnxt/bnxt.h
@@ ... @@ struct bnxt {
        #define BNXT_FW_CAP_CFA_NTUPLE_RX_EXT_IP_PROTO  BIT_ULL(38)
        #define BNXT_FW_CAP_CFA_RFS_RING_TBL_IDX_V3     BIT_ULL(39)
        #define BNXT_FW_CAP_VNIC_RE_FLUSH               BIT_ULL(40)
+       #define BNXT_FW_CAP_SW_MAX_RESOURCE_LIMITS      BIT_ULL(41)
 
        u32                     fw_dbg_cap;
 
@@ ... @@ struct bnxt {
        ((bp)->fw_cap & BNXT_FW_CAP_ENABLE_RDMA_SRIOV)
 #define BNXT_ROCE_VF_RESC_CAP(bp)      \
        ((bp)->fw_cap & BNXT_FW_CAP_ROCE_VF_RESC_MGMT_SUPPORTED)
+#define BNXT_SW_RES_LMT(bp)            \
+       ((bp)->fw_cap & BNXT_FW_CAP_SW_MAX_RESOURCE_LIMITS)
 
        u32                     hwrm_spec_code;
        u16                     hwrm_cmd_seq;
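
For reference, below is a minimal standalone sketch of the sizing policy the hunks above introduce: when the firmware advertises the SW max resource limits capability (checked in the driver via BNXT_SW_RES_LMT()), the extra RoCE QP/SRQ backing store is sized to the full firmware-advertised maxima instead of the older 64K/8K driver-side caps. The struct, function name, field names, and the numbers in main() are illustrative assumptions for this sketch only, not the real bnxt data structures.

/*
 * Standalone illustration (not driver code) of the two sizing paths.
 * struct roce_caps and size_extra_roce() are hypothetical stand-ins
 * for the values the driver derives from FUNC_QCAPS output.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MIN_U32(a, b) ((uint32_t)(a) < (uint32_t)(b) ? (uint32_t)(a) : (uint32_t)(b))

struct roce_caps {
	uint32_t max_qps;
	uint32_t l2_qps;
	uint32_t qp1_qps;
	uint32_t fast_qpmd_qps;
	uint32_t max_srqs;
	uint32_t srqs;
	bool sw_res_lmt;	/* FW advertised SW max resource limits */
};

static void size_extra_roce(const struct roce_caps *c,
			    uint32_t *extra_qps, uint32_t *extra_srqs)
{
	if (c->sw_res_lmt) {
		/* New path: use the full FW-advertised maxima. */
		*extra_qps = c->max_qps - c->l2_qps - c->qp1_qps;
		*extra_srqs = c->max_srqs - c->srqs;
	} else {
		/* Legacy path: cap at 64K QPs / 8K SRQs, plus fast-QPMD QPs. */
		*extra_qps = MIN_U32(65536, c->max_qps - c->l2_qps - c->qp1_qps);
		*extra_qps += c->fast_qpmd_qps;
		*extra_srqs = MIN_U32(8192, c->max_srqs - c->srqs);
	}
}

int main(void)
{
	/* Example numbers chosen only to show the two paths diverge. */
	struct roce_caps caps = {
		.max_qps = 262144, .l2_qps = 1024, .qp1_qps = 2,
		.fast_qpmd_qps = 128, .max_srqs = 32768, .srqs = 512,
	};
	uint32_t qps, srqs;

	for (int lmt = 0; lmt <= 1; lmt++) {
		caps.sw_res_lmt = lmt;
		size_extra_roce(&caps, &qps, &srqs);
		printf("sw_res_lmt=%d -> extra_qps=%u extra_srqs=%u\n",
		       lmt, qps, srqs);
	}
	return 0;
}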