#define NVME_GET(name, value) \
(((value) >> NVME_##name##_SHIFT) & NVME_##name##_MASK)
+/**
+ * NVME_SET() - set field into complex value
+ * @name: The name of the sub-field within an nvme value
+ * @value: The value to be set
+ *
+ * Returns: 'value' masked and shifted into the bit position of the 'name' field
+ */
+#define NVME_SET(name, value) \
+ (((value) & NVME_##name##_MASK) << NVME_##name##_SHIFT)
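+
+/*
+ * Illustrative example (editor's sketch, not part of the NVMe definitions;
+ * it assumes the CC_EN and CC_IOSQES shift/mask pairs defined later in this
+ * header): a caller could compose a Controller Configuration value with
+ *
+ *	__u32 cc = NVME_SET(CC_EN, 1) | NVME_SET(CC_IOSQES, 6);
+ *
+ * and NVME_GET(CC_EN, cc) would then extract the value 1 back out.
+ */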
+
/**
 * cpu_to_le16() - Convert a 16-bit CPU-native value to little endian
* @x: 16-bit CPU value to turn to little endian.
return le32_to_cpu(*p) | ((uint64_t)le32_to_cpu(*(p + 1)) << 32);
}
-#define NVME_REG_VALUE(name, value) NVME_GET(name, value)
-
enum nvme_cap {
NVME_CAP_MQES_SHIFT = 0,
NVME_CAP_CQR_SHIFT = 16,
NVME_CAP_CSS_ADMIN = 1 << 7,
};
-#define NVME_CAP_MQES(cap) NVME_REG_VALUE(CAP_MQES, cap)
-#define NVME_CAP_CQR(cap) NVME_REG_VALUE(CAP_CQR, cap)
-#define NVME_CAP_AMS(cap) NVME_REG_VALUE(CAP_AMS, cap)
-#define NVME_CAP_TO(cap) NVME_REG_VALUE(CAP_TO, cap)
-#define NVME_CAP_DSTRD(cap) NVME_REG_VALUE(CAP_DSTRD, cap)
-#define NVME_CAP_NSSRC(cap) NVME_REG_VALUE(CAP_NSSRC, cap)
-#define NVME_CAP_CSS(cap) NVME_REG_VALUE(CAP_CSS, cap)
-#define NVME_CAP_BPS(cap) NVME_REG_VALUE(CAP_BPS, cap)
-#define NVME_CAP_MPSMIN(cap) NVME_REG_VALUE(CAP_MPSMIN, cap)
-#define NVME_CAP_MPSMAX(cap) NVME_REG_VALUE(CAP_MPSMAX, cap)
-#define NVME_CAP_CMBS(cap) NVME_REG_VALUE(CAP_CMBS, cap)
-#define NVME_CAP_PMRS(cap) NVME_REG_VALUE(CAP_PMRS, cap)
+#define NVME_CAP_MQES(cap) NVME_GET(CAP_MQES, cap)
+#define NVME_CAP_CQR(cap) NVME_GET(CAP_CQR, cap)
+#define NVME_CAP_AMS(cap) NVME_GET(CAP_AMS, cap)
+#define NVME_CAP_TO(cap) NVME_GET(CAP_TO, cap)
+#define NVME_CAP_DSTRD(cap) NVME_GET(CAP_DSTRD, cap)
+#define NVME_CAP_NSSRC(cap) NVME_GET(CAP_NSSRC, cap)
+#define NVME_CAP_CSS(cap) NVME_GET(CAP_CSS, cap)
+#define NVME_CAP_BPS(cap) NVME_GET(CAP_BPS, cap)
+#define NVME_CAP_MPSMIN(cap) NVME_GET(CAP_MPSMIN, cap)
+#define NVME_CAP_MPSMAX(cap) NVME_GET(CAP_MPSMAX, cap)
+#define NVME_CAP_CMBS(cap) NVME_GET(CAP_CMBS, cap)
+#define NVME_CAP_PMRS(cap) NVME_GET(CAP_PMRS, cap)
enum nvme_vs {
NVME_VS_TER_SHIFT = 0,
NVME_VS_MJR_MASK = 0xffff,
};
-#define NVME_VS_TER(vs) NVME_REG_VALUE(VS_TER, vs)
-#define NVME_VS_MNR(vs) NVME_REG_VALUE(VS_MNR, vs)
-#define NVME_VS_MJR(vs) NVME_REG_VALUE(VS_MJR, vs)
+#define NVME_VS_TER(vs) NVME_GET(VS_TER, vs)
+#define NVME_VS_MNR(vs) NVME_GET(VS_MNR, vs)
+#define NVME_VS_MJR(vs) NVME_GET(VS_MJR, vs)
#define NVME_MAJOR(ver) NVME_VS_MJR(ver)
#define NVME_MINOR(ver) NVME_VS_MNR(ver)
NVME_CC_SHN_ABRUPT = 2,
};
-#define NVME_CC_EN(cc) NVME_REG_VALUE(CC_EN, cc)
-#define NVME_CC_CSS(cc) NVME_REG_VALUE(CC_CSS, cc)
-#define NVME_CC_MPS(cc) NVME_REG_VALUE(CC_MPS, cc)
-#define NVME_CC_AMS(cc) NVME_REG_VALUE(CC_AMS, cc)
-#define NVME_CC_SHN(cc) NVME_REG_VALUE(CC_SHN, cc)
-#define NVME_CC_IOSQES(cc) NVME_REG_VALUE(CC_IOSQES, cc)
-#define NVME_CC_IOCQES(cc) NVME_REG_VALUE(CC_IOCQES, cc)
+#define NVME_CC_EN(cc) NVME_GET(CC_EN, cc)
+#define NVME_CC_CSS(cc) NVME_GET(CC_CSS, cc)
+#define NVME_CC_MPS(cc) NVME_GET(CC_MPS, cc)
+#define NVME_CC_AMS(cc) NVME_GET(CC_AMS, cc)
+#define NVME_CC_SHN(cc) NVME_GET(CC_SHN, cc)
+#define NVME_CC_IOSQES(cc) NVME_GET(CC_IOSQES, cc)
+#define NVME_CC_IOCQES(cc) NVME_GET(CC_IOCQES, cc)
enum nvme_csts {
NVME_CSTS_RDY_SHIFT = 0,
NVME_CSTS_SHST_MASK = 3,
};
-#define NVME_CSTS_RDY(csts) NVME_REG_VALUE(CSTS_RDY, csts)
-#define NVME_CSTS_CFS(csts) NVME_REG_VALUE(CSTS_CFS, csts)
-#define NVME_CSTS_SHST(csts) NVME_REG_VALUE(CSTS_SHST, csts)
-#define NVME_CSTS_NSSRO(csts) NVME_REG_VALUE(CSTS_NSSRO, csts)
-#define NVME_CSTS_PP(csts) NVME_REG_VALUE(CSTS_PP, csts)
+#define NVME_CSTS_RDY(csts) NVME_GET(CSTS_RDY, csts)
+#define NVME_CSTS_CFS(csts) NVME_GET(CSTS_CFS, csts)
+#define NVME_CSTS_SHST(csts) NVME_GET(CSTS_SHST, csts)
+#define NVME_CSTS_NSSRO(csts) NVME_GET(CSTS_NSSRO, csts)
+#define NVME_CSTS_PP(csts) NVME_GET(CSTS_PP, csts)
enum nvme_aqa {
NVME_AQA_ASQS_SHIFT = 0,
NVME_AQA_ACQS_MASK = 0xfff,
};
-#define NVME_AQA_ASQS(aqa) NVME_REG_VALUE(AQA_ASQS, aqa)
-#define NVME_AQA_ACQS(aqa) NVME_REG_VALUE(AQA_ACQS, aqa)
+#define NVME_AQA_ASQS(aqa) NVME_GET(AQA_ASQS, aqa)
+#define NVME_AQA_ACQS(aqa) NVME_GET(AQA_ACQS, aqa)
enum nvme_cmbloc {
NVME_CMBLOC_BIR_SHIFT = 0,
NVME_CMBLOC_OFST_MASK = 0xfffff,
};
-#define NVME_CMBLOC_BIR(cmbloc) NVME_REG_VALUE(CMBLOC_BIR, cmbloc)
-#define NVME_CMBLOC_CQMMS(cmbloc) NVME_REG_VALUE(CMBLOC_CQMMS, cmbloc)
-#define NVME_CMBLOC_CQPDS(cmbloc) NVME_REG_VALUE(CMBLOC_CQPDS, cmbloc)
-#define NVME_CMBLOC_CDPLMS(cmbloc) NVME_REG_VALUE(CMBLOC_CDPLMS, cmbloc)
-#define NVME_CMBLOC_CDPCILS(cmbloc) NVME_REG_VALUE(CMBLOC_CDPCILS, cmbloc)
-#define NVME_CMBLOC_CDMMMS(cmbloc) NVME_REG_VALUE(CMBLOC_CDMMMS, cmbloc)
-#define NVME_CMBLOC_CQDA(cmbloc) NVME_REG_VALUE(CMBLOC_CQDA, cmbloc)
-#define NVME_CMBLOC_OFST(cmbloc) NVME_REG_VALUE(CMBLOC_OFST, cmbloc)
+#define NVME_CMBLOC_BIR(cmbloc) NVME_GET(CMBLOC_BIR, cmbloc)
+#define NVME_CMBLOC_CQMMS(cmbloc) NVME_GET(CMBLOC_CQMMS, cmbloc)
+#define NVME_CMBLOC_CQPDS(cmbloc) NVME_GET(CMBLOC_CQPDS, cmbloc)
+#define NVME_CMBLOC_CDPLMS(cmbloc) NVME_GET(CMBLOC_CDPLMS, cmbloc)
+#define NVME_CMBLOC_CDPCILS(cmbloc) NVME_GET(CMBLOC_CDPCILS, cmbloc)
+#define NVME_CMBLOC_CDMMMS(cmbloc) NVME_GET(CMBLOC_CDMMMS, cmbloc)
+#define NVME_CMBLOC_CQDA(cmbloc) NVME_GET(CMBLOC_CQDA, cmbloc)
+#define NVME_CMBLOC_OFST(cmbloc) NVME_GET(CMBLOC_OFST, cmbloc)
enum nvme_cmbsz {
NVME_CMBSZ_SQS_SHIFT = 0,
NVME_CMBSZ_SZU_64G = 6,
};
-#define NVME_CMBSZ_SQS(cmbsz) NVME_REG_VALUE(CMBSZ_SQS, cmbsz)
-#define NVME_CMBSZ_CQS(cmbsz) NVME_REG_VALUE(CMBSZ_CQS, cmbsz)
-#define NVME_CMBSZ_LISTS(cmbsz) NVME_REG_VALUE(CMBSZ_LISTS, cmbsz)
-#define NVME_CMBSZ_RDS(cmbsz) NVME_REG_VALUE(CMBSZ_RDS, cmbsz)
-#define NVME_CMBSZ_WDS(cmbsz) NVME_REG_VALUE(CMBSZ_WDS, cmbsz)
-#define NVME_CMBSZ_SZU(cmbsz) NVME_REG_VALUE(CMBSZ_SZU, cmbsz)
-#define NVME_CMBSZ_SZ(cmbsz) NVME_REG_VALUE(CMBSZ_SZ, cmbsz)
+#define NVME_CMBSZ_SQS(cmbsz) NVME_GET(CMBSZ_SQS, cmbsz)
+#define NVME_CMBSZ_CQS(cmbsz) NVME_GET(CMBSZ_CQS, cmbsz)
+#define NVME_CMBSZ_LISTS(cmbsz) NVME_GET(CMBSZ_LISTS, cmbsz)
+#define NVME_CMBSZ_RDS(cmbsz) NVME_GET(CMBSZ_RDS, cmbsz)
+#define NVME_CMBSZ_WDS(cmbsz) NVME_GET(CMBSZ_WDS, cmbsz)
+#define NVME_CMBSZ_SZU(cmbsz) NVME_GET(CMBSZ_SZU, cmbsz)
+#define NVME_CMBSZ_SZ(cmbsz) NVME_GET(CMBSZ_SZ, cmbsz)
/**
* nvme_cmb_size() - Calculate size of the controller memory buffer
NVME_BPINFO_BRS_READ_ERROR = 3,
};
-#define NVME_BPINFO_BPSZ(bpinfo) NVME_REG_VALUE(BPINFO_BPSZ, bpinfo)
-#define NVME_BPINFO_BRS(bpinfo) NVME_REG_VALUE(BPINFO_BRS, bpinfo)
-#define NVME_BPINFO_ABPID(bpinfo) NVME_REG_VALUE(BPINFO_ABPID, bpinfo)
+#define NVME_BPINFO_BPSZ(bpinfo) NVME_GET(BPINFO_BPSZ, bpinfo)
+#define NVME_BPINFO_BRS(bpinfo) NVME_GET(BPINFO_BRS, bpinfo)
+#define NVME_BPINFO_ABPID(bpinfo) NVME_GET(BPINFO_ABPID, bpinfo)
enum nvme_bprsel {
NVME_BPRSEL_BPRSZ_SHIFT = 0,
NVME_BPRSEL_BPID_MASK = 0x1,
};
-#define NVME_BPRSEL_BPRSZ(bprsel) NVME_REG_VALUE(BPRSEL_BPRSZ, bprsel)
-#define NVME_BPRSEL_BPROF(bprsel) NVME_REG_VALUE(BPRSEL_BPROF, bprsel)
-#define NVME_BPRSEL_BPID(bprsel) NVME_REG_VALUE(BPRSEL_BPID, bprsel)
+#define NVME_BPRSEL_BPRSZ(bprsel) NVME_GET(BPRSEL_BPRSZ, bprsel)
+#define NVME_BPRSEL_BPROF(bprsel) NVME_GET(BPRSEL_BPROF, bprsel)
+#define NVME_BPRSEL_BPID(bprsel) NVME_GET(BPRSEL_BPID, bprsel)
enum nvme_cmbmsc {
NVME_CMBMSC_CRE_SHIFT = 0,
};
static const __u64 NVME_CMBMSC_CBA_MASK = 0xfffffffffffffull;
-#define NVME_CMBMSC_CRE(cmbmsc) NVME_REG_VALUE(CMBMSC_CRE, cmbmsc)
-#define NVME_CMBMSC_CMSE(cmbmsc) NVME_REG_VALUE(CMBMSC_CMSE, cmbmsc)
-#define NVME_CMBMSC_CBA(cmbmsc) NVME_REG_VALUE(CMBMSC_CBA, cmbmsc)
+#define NVME_CMBMSC_CRE(cmbmsc) NVME_GET(CMBMSC_CRE, cmbmsc)
+#define NVME_CMBMSC_CMSE(cmbmsc) NVME_GET(CMBMSC_CMSE, cmbmsc)
+#define NVME_CMBMSC_CBA(cmbmsc) NVME_GET(CMBMSC_CBA, cmbmsc)
enum nvme_cmbsts {
NVME_CMBSTS_CBAI_SHIFT = 0,
NVME_CMBSTS_CBAI_MASK = 0x1,
};
-#define NVME_CMBSTS_CBAI(cmbsts) NVME_REG_VALUE(CMBSTS_CBAI, cmbsts)
+#define NVME_CMBSTS_CBAI(cmbsts) NVME_GET(CMBSTS_CBAI, cmbsts)
enum nvme_pmrcap {
NVME_PMRCAP_RDS_SHIFT = 3,
NVME_PMRCAP_PMRTU_60S = 1,
};
-#define NVME_PMRCAP_RDS(pmrcap) NVME_REG_VALUE(PMRCAP_RDS, pmrcap)
-#define NVME_PMRCAP_WDS(pmrcap) NVME_REG_VALUE(PMRCAP_WDS, pmrcap)
-#define NVME_PMRCAP_BIR(pmrcap) NVME_REG_VALUE(PMRCAP_BIR, pmrcap)
-#define NVME_PMRCAP_PMRTU(pmrcap) NVME_REG_VALUE(PMRCAP_PMRTU, pmrcap)
-#define NVME_PMRCAP_PMRWMB(pmrcap) NVME_REG_VALUE(PMRCAP_PMRWMB, pmrcap)
-#define NVME_PMRCAP_PMRTO(pmrcap) NVME_REG_VALUE(PMRCAP_PMRTO, pmrcap)
-#define NVME_PMRCAP_CMSS(pmrcap) NVME_REG_VALUE(PMRCAP_CMSS, pmrcap)
+#define NVME_PMRCAP_RDS(pmrcap) NVME_GET(PMRCAP_RDS, pmrcap)
+#define NVME_PMRCAP_WDS(pmrcap) NVME_GET(PMRCAP_WDS, pmrcap)
+#define NVME_PMRCAP_BIR(pmrcap) NVME_GET(PMRCAP_BIR, pmrcap)
+#define NVME_PMRCAP_PMRTU(pmrcap) NVME_GET(PMRCAP_PMRTU, pmrcap)
+#define NVME_PMRCAP_PMRWMB(pmrcap) NVME_GET(PMRCAP_PMRWMB, pmrcap)
+#define NVME_PMRCAP_PMRTO(pmrcap) NVME_GET(PMRCAP_PMRTO, pmrcap)
+#define NVME_PMRCAP_CMSS(pmrcap) NVME_GET(PMRCAP_CMSS, pmrcap)
enum nvme_pmrctl {
NVME_PMRCTL_EN_SHIFT = 0,
NVME_PMRCTL_EN_MASK = 0x1,
};
-#define NVME_PMRCTL_EN(pmrctl) NVME_REG_VALUE(PMRCTL_EN, pmrctl)
+#define NVME_PMRCTL_EN(pmrctl) NVME_GET(PMRCTL_EN, pmrctl)
enum nvme_pmrsts {
NVME_PMRSTS_ERR_SHIFT = 0,
NVME_PMRSTS_CBAI_MASK = 0x1,
};
-#define NVME_PMRSTS_ERR(pmrsts) NVME_REG_VALUE(PMRSTS_ERR, pmrsts)
-#define NVME_PMRSTS_NRDY(pmrsts) NVME_REG_VALUE(PMRSTS_NRDY, pmrsts)
-#define NVME_PMRSTS_HSTS(pmrsts) NVME_REG_VALUE(PMRSTS_HSTS, pmrsts)
-#define NVME_PMRSTS_CBAI(pmrsts) NVME_REG_VALUE(PMRSTS_CBAI, pmrsts)
+#define NVME_PMRSTS_ERR(pmrsts) NVME_GET(PMRSTS_ERR, pmrsts)
+#define NVME_PMRSTS_NRDY(pmrsts) NVME_GET(PMRSTS_NRDY, pmrsts)
+#define NVME_PMRSTS_HSTS(pmrsts) NVME_GET(PMRSTS_HSTS, pmrsts)
+#define NVME_PMRSTS_CBAI(pmrsts) NVME_GET(PMRSTS_CBAI, pmrsts)
enum nvme_pmrebs {
NVME_PMREBS_PMRSZU_SHIFT = 0,
NVME_PMREBS_PMRSZU_1G = 3,
};
-#define NVME_PMREBS_PMRSZU(pmrebs) NVME_REG_VALUE(PMREBS_PMRSZU, pmrebs)
-#define NVME_PMREBS_RBB(pmrebs) NVME_REG_VALUE(PMREBS_RBB, pmrebs)
-#define NVME_PMREBS_PMRWBZ(pmrebs) NVME_REG_VALUE(PMREBS_PMRWBZ, pmrebs)
+#define NVME_PMREBS_PMRSZU(pmrebs) NVME_GET(PMREBS_PMRSZU, pmrebs)
+#define NVME_PMREBS_RBB(pmrebs) NVME_GET(PMREBS_RBB, pmrebs)
+#define NVME_PMREBS_PMRWBZ(pmrebs) NVME_GET(PMREBS_PMRWBZ, pmrebs)
/**
* nvme_pmr_size() - Calculate size of persistent memory region elasticity
NVME_PMRSWTP_PMRSWTU_GBPS = 3,
};
-#define NVME_PMRSWTP_PMRSWTU(pmrswtp) NVME_REG_VALUE(PMRSWTP_PMRSWTU, pmrswtp)
-#define NVME_PMRSWTP_PMRSWTV(pmrswtp) NVME_REG_VALUE(PMRSWTP_PMRSWTU, pmrswtp)
+#define NVME_PMRSWTP_PMRSWTU(pmrswtp) NVME_GET(PMRSWTP_PMRSWTU, pmrswtp)
+#define NVME_PMRSWTP_PMRSWTV(pmrswtp) NVME_GET(PMRSWTP_PMRSWTV, pmrswtp)
/**
* nvme_pmr_throughput() - Calculate throughput of persistent memory buffer
};
static const __u64 NVME_PMRMSC_CBA_MASK = 0xfffffffffffffull;
-#define NVME_PMRMSC_CMSE(pmrmsc) NVME_REG_VALUE(PMRMSC_CMSE, pmrmsc)
-#define NVME_PMRMSC_CBA(pmrmsc) NVME_REG_VALUE(PMRMSC_CBA, pmrmsc)
+#define NVME_PMRMSC_CMSE(pmrmsc) NVME_GET(PMRMSC_CMSE, pmrmsc)
+#define NVME_PMRMSC_CBA(pmrmsc) NVME_GET(PMRMSC_CBA, pmrmsc)
/**
* enum nvme_psd_flags - Possible flag values in nvme power state descriptor
{
return nvme_get_attr(nvme_path_get_sysfs_dir(p), attr);
}
-
-enum {
- NVME_FEAT_ARB_BURST_MASK = 0x00000007,
- NVME_FEAT_ARB_LPW_MASK = 0x0000ff00,
- NVME_FEAT_ARB_MPW_MASK = 0x00ff0000,
- NVME_FEAT_ARB_HPW_MASK = 0xff000000,
- NVME_FEAT_PM_PS_MASK = 0x0000001f,
- NVME_FEAT_PM_WH_MASK = 0x000000e0,
- NVME_FEAT_LBAR_NR_MASK = 0x0000003f,
- NVME_FEAT_TT_TMPTH_MASK = 0x0000ffff,
- NVME_FEAT_TT_TMPSEL_MASK = 0x000f0000,
- NVME_FEAT_TT_THSEL_MASK = 0x00300000,
- NVME_FEAT_ER_TLER_MASK = 0x0000ffff,
- NVME_FEAT_ER_DULBE_MASK = 0x00010000,
- NVME_FEAT_VWC_WCE_MASK = 0x00000001,
- NVME_FEAT_NRQS_NSQR_MASK = 0x0000ffff,
- NVME_FEAT_NRQS_NCQR_MASK = 0xffff0000,
- NVME_FEAT_ICOAL_THR_MASK = 0x000000ff,
- NVME_FEAT_ICOAL_TIME_MASK = 0x0000ff00,
- NVME_FEAT_ICFG_IV_MASK = 0x0000ffff,
- NVME_FEAT_ICFG_CD_MASK = 0x00010000,
- NVME_FEAT_WA_DN_MASK = 0x00000001,
- NVME_FEAT_AE_SMART_MASK = 0x000000ff,
- NVME_FEAT_AE_NAN_MASK = 0x00000100,
- NVME_FEAT_AE_FW_MASK = 0x00000200,
- NVME_FEAT_AE_TELEM_MASK = 0x00000400,
- NVME_FEAT_AE_ANA_MASK = 0x00000800,
- NVME_FEAT_AE_PLA_MASK = 0x00001000,
- NVME_FEAT_AE_LBAS_MASK = 0x00002000,
- NVME_FEAT_AE_EGA_MASK = 0x00004000,
- NVME_FEAT_APST_APSTE_MASK = 0x00000001,
- NVME_FEAT_HMEM_EHM_MASK = 0x00000001,
- NVME_FEAT_HCTM_TMT2_MASK = 0x0000ffff,
- NVME_FEAT_HCTM_TMT1_MASK = 0xffff0000,
- NVME_FEAT_NOPS_NOPPME_MASK = 0x00000001,
- NVME_FEAT_RRL_RRL_MASK = 0x000000ff,
- NVME_FEAT_PLM_PLME_MASK = 0x00000001,
- NVME_FEAT_PLMW_WS_MASK = 0x00000007,
- NVME_FEAT_LBAS_LSIRI_MASK = 0x0000ffff,
- NVME_FEAT_LBAS_LSIPI_MASK = 0xffff0000,
- NVME_FEAT_SC_NODRM_MASK = 0x00000001,
- NVME_FEAT_EG_ENDGID_MASK = 0x0000ffff,
- NVME_FEAT_EG_EGCW_MASK = 0x00ff0000,
- NVME_FEAT_SPM_PBSLC_MASK = 0x000000ff,
- NVME_FEAT_HOSTID_EXHID_MASK = 0x00000001,
- NVME_FEAT_RM_REGPRE_MASK = 0x00000002,
- NVME_FEAT_RM_RESREL_MASK = 0x00000004,
- NVME_FEAT_RM_RESPRE_MASK = 0x00000008,
- NVME_FEAT_RP_PTPL_MASK = 0x00000001,
- NVME_FEAT_WP_WPS_MASK = 0x00000007,
-};
-
-#define shift(v, s, m) ((v & m) >> s)
-
-#define NVME_FEAT_ARB_BURST(v) shift(v, 0, NVME_FEAT_ARB_BURST_MASK)
-#define NVME_FEAT_ARB_LPW(v) shift(v, 8, NVME_FEAT_ARB_LPW_MASK)
-#define NVME_FEAT_ARB_MPW(v) shift(v, 16, NVME_FEAT_ARB_MPW_MASK)
-#define NVME_FEAT_ARB_HPW(v) shift(v, 24, NVME_FEAT_ARB_HPW_MASK)
-
-void nvme_feature_decode_arbitration(__u32 value, __u8 *ab, __u8 *lpw,
- __u8 *mpw, __u8 *hpw)
-{
- *ab = NVME_FEAT_ARB_BURST(value);
- *lpw = NVME_FEAT_ARB_LPW(value);
- *mpw = NVME_FEAT_ARB_MPW(value);
- *hpw = NVME_FEAT_ARB_HPW(value);
-};
-
-#define NVME_FEAT_PM_PS(v) shift(v, 0, NVME_FEAT_PM_PS_MASK)
-#define NVME_FEAT_PM_WH(v) shift(v, 5, NVME_FEAT_PM_WH_MASK)
-
-void nvme_feature_decode_power_mgmt(__u32 value, __u8 *ps, __u8 *wh)
-{
- *ps = NVME_FEAT_PM_PS(value);
- *wh = NVME_FEAT_PM_WH(value);
-}
-
-#define NVME_FEAT_LBAR_NR(v) shift(v, 0, NVME_FEAT_LBAR_NR_MASK)
-
-void nvme_feature_decode_lba_range(__u32 value, __u8 *num)
-{
- *num = NVME_FEAT_LBAR_NR(value);
-}
-
-#define NVME_FEAT_TT_TMPTH(v) shift(v, 0, NVME_FEAT_TT_TMPTH_MASK)
-#define NVME_FEAT_TT_TMPSEL(v) shift(v, 16, NVME_FEAT_TT_TMPSEL_MASK)
-#define NVME_FEAT_TT_THSEL(v) shift(v, 20, NVME_FEAT_TT_THSEL_MASK)
-
-void nvme_feature_decode_temp_threshold(__u32 value, __u16 *tmpth, __u8 *tmpsel, __u8 *thsel)
-{
- *tmpth = NVME_FEAT_TT_TMPTH(value);
- *tmpsel = NVME_FEAT_TT_TMPSEL(value);
- *thsel = NVME_FEAT_TT_THSEL(value);
-}
-
-#define NVME_FEAT_ER_TLER(v) shift(v, 0, NVME_FEAT_ER_TLER_MASK)
-#define NVME_FEAT_ER_DULBE(v) shift(v, 16, NVME_FEAT_ER_DULBE_MASK)
-
-void nvme_feature_decode_error_recovery(__u32 value, __u16 *tler, bool *dulbe)
-{
- *tler = NVME_FEAT_ER_TLER(value);
- *dulbe = NVME_FEAT_ER_DULBE(value);
-}
-
-#define NVME_FEAT_VWC_WCE(v) shift(v, 0, NVME_FEAT_VWC_WCE_MASK)
-
-void nvme_feature_decode_volatile_write_cache(__u32 value, bool *wce)
-{
- *wce = NVME_FEAT_VWC_WCE(value);
-}
-
-#define NVME_FEAT_NRQS_NSQR(v) shift(v, 0, NVME_FEAT_NRQS_NSQR_MASK)
-#define NVME_FEAT_NRQS_NCQR(v) shift(v, 16, NVME_FEAT_NRQS_NCQR_MASK)
-
-void nvme_feature_decode_number_of_queues(__u32 value, __u16 *nsqr, __u16 *ncqr)
-{
- *nsqr = NVME_FEAT_NRQS_NSQR(value);
- *ncqr = NVME_FEAT_NRQS_NCQR(value);
-}
-
-#define NVME_FEAT_ICOAL_THR(v) shift(v, 0, NVME_FEAT_ICOAL_THR_MASK)
-#define NVME_FEAT_ICOAL_TIME(v) shift(v, 8, NVME_FEAT_ICOAL_TIME_MASK)
-
-void nvme_feature_decode_interrupt_coalescing(__u32 value, __u8 *thr, __u8 *time)
-{
- *thr = NVME_FEAT_ICOAL_THR(value);
- *time = NVME_FEAT_ICOAL_TIME(value);
-}
-
-#define NVME_FEAT_ICFG_IV(v) shift(v, 0, NVME_FEAT_ICFG_IV_MASK)
-#define NVME_FEAT_ICFG_CD(v) shift(v, 16, NVME_FEAT_ICFG_CD_MASK)
-
-void nvme_feature_decode_interrupt_config(__u32 value, __u16 *iv, bool *cd)
-{
- *iv = NVME_FEAT_ICFG_IV(value);
- *cd = NVME_FEAT_ICFG_CD(value);
-}
-
-#define NVME_FEAT_WA_DN(v) shift(v, 0, NVME_FEAT_WA_DN_MASK)
-
-void nvme_feature_decode_write_atomicity(__u32 value, bool *dn)
-{
- *dn = NVME_FEAT_WA_DN(value);
-}
-
-#define NVME_FEAT_AE_SMART(v) shift(v, 0, NVME_FEAT_AE_SMART_MASK)
-#define NVME_FEAT_AE_NAN(v) shift(v, 8, NVME_FEAT_AE_NAN_MASK)
-#define NVME_FEAT_AE_FW(v) shift(v, 9, NVME_FEAT_AE_FW_MASK)
-#define NVME_FEAT_AE_TELEM(v) shift(v, 10, NVME_FEAT_AE_TELEM_MASK)
-#define NVME_FEAT_AE_ANA(v) shift(v, 11, NVME_FEAT_AE_ANA_MASK)
-#define NVME_FEAT_AE_PLA(v) shift(v, 12, NVME_FEAT_AE_PLA_MASK)
-#define NVME_FEAT_AE_LBAS(v) shift(v, 13, NVME_FEAT_AE_LBAS_MASK)
-#define NVME_FEAT_AE_EGA(v) shift(v, 14, NVME_FEAT_AE_EGA_MASK)
-
-void nvme_feature_decode_async_event_config(__u32 value, __u8 *smart,
- bool *nan, bool *fw, bool *telem, bool *ana, bool *pla, bool *lbas,
- bool *ega)
-{
- *smart = NVME_FEAT_AE_SMART(value);
- *nan = NVME_FEAT_AE_NAN(value);
- *fw = NVME_FEAT_AE_FW(value);
- *telem = NVME_FEAT_AE_TELEM(value);
- *ana = NVME_FEAT_AE_ANA(value);
- *pla = NVME_FEAT_AE_PLA(value);
- *lbas = NVME_FEAT_AE_LBAS(value);
- *ega = NVME_FEAT_AE_EGA(value);
-}
-
-#define NVME_FEAT_APST_APSTE(v) shift(v, 0, NVME_FEAT_APST_APSTE_MASK)
-
-void nvme_feature_decode_auto_power_state(__u32 value, bool *apste)
-{
- *apste = NVME_FEAT_APST_APSTE(value);
-}
-
-#define NVME_FEAT_HMEM_EHM(v) shift(v, 0, NVME_FEAT_HMEM_EHM_MASK)
-
-void nvme_feature_decode_host_memory_buffer(__u32 value, bool *ehm)
-{
- *ehm = NVME_FEAT_HMEM_EHM(value);
-}
-
-#define NVME_FEAT_HCTM_TMT2(v) shift(v, 0, NVME_FEAT_HCTM_TMT2_MASK)
-#define NVME_FEAT_HCTM_TMT1(v) shift(v, 16, NVME_FEAT_HCTM_TMT1_MASK)
-
-void nvme_feature_decode_host_thermal_mgmt(__u32 value, __u16 *tmt2, __u16 *tmt1)
-{
- *tmt2 = NVME_FEAT_HCTM_TMT2(value);
- *tmt1 = NVME_FEAT_HCTM_TMT1(value);
-}
-
-#define NVME_FEAT_NOPS_NOPPME(v) shift(v, 0, NVME_FEAT_NOPS_NOPPME_MASK)
-
-void nvme_feature_decode_non_op_power_config(__u32 value, bool *noppme)
-{
- *noppme = NVME_FEAT_NOPS_NOPPME(value);
-}
-
-#define NVME_FEAT_RRL_RRL(v) shift(v, 0, NVME_FEAT_RRL_RRL_MASK)
-
-void nvme_feature_decode_read_recovery_level_config(__u32 value, __u8 *rrl)
-{
- *rrl = NVME_FEAT_RRL_RRL(value);
-}
-
-#define NVME_FEAT_PLM_PLME(v) shift(v, 0, NVME_FEAT_PLM_PLME_MASK)
-
-void nvme_feature_decode_predictable_latency_mode_config(__u32 value, bool *plme)
-{
- *plme = NVME_FEAT_PLM_PLME(value);
-}
-
-#define NVME_FEAT_PLMW_WS(v) shift(v, 0, NVME_FEAT_PLMW_WS_MASK)
-
-void nvme_feature_decode_predictable_latency_mode_window(__u32 value, __u8 *ws)
-{
- *ws = NVME_FEAT_PLMW_WS(value);
-}
-
-#define NVME_FEAT_LBAS_LSIRI(v) shift(v, 0, NVME_FEAT_LBAS_LSIRI_MASK)
-#define NVME_FEAT_LBAS_LSIPI(v) shift(v, 16, NVME_FEAT_LBAS_LSIPI_MASK)
-
-void nvme_feature_decode_lba_status_attributes(__u32 value, __u16 *lsiri, __u16 *lsipi)
-{
- *lsiri = NVME_FEAT_LBAS_LSIRI(value);
- *lsipi = NVME_FEAT_LBAS_LSIPI(value);
-}
-
-#define NVME_FEAT_SC_NODRM(v) shift(v, 0, NVME_FEAT_SC_NODRM_MASK)
-
-void nvme_feature_decode_sanitize_config(__u32 value, bool *nodrm)
-{
- *nodrm = NVME_FEAT_SC_NODRM(value);
-}
-
-#define NVME_FEAT_EG_ENDGID(v) shift(v, 0, NVME_FEAT_EG_ENDGID_MASK)
-#define NVME_FEAT_EG_EGCW(v) shift(v, 16, NVME_FEAT_EG_EGCW_MASK)
-
-void nvme_feature_decode_endurance_group_event_config(__u32 value,
- __u16 *endgid, __u8 *endgcw)
-{
- *endgid = NVME_FEAT_EG_ENDGID(value);
- *endgcw = NVME_FEAT_EG_EGCW(value);
-}
-
-#define NVME_FEAT_SPM_PBSLC(v) shift(v, 0, NVME_FEAT_SPM_PBSLC_MASK)
-
-void nvme_feature_decode_software_progress_marker(__u32 value, __u8 *pbslc)
-{
- *pbslc = NVME_FEAT_SPM_PBSLC(value);
-}
-
-#define NVME_FEAT_HOSTID_EXHID(v) shift(v, 0, NVME_FEAT_HOSTID_EXHID_MASK)
-
-void nvme_feature_decode_host_identifier(__u32 value, bool *exhid)
-{
- *exhid = NVME_FEAT_HOSTID_EXHID(value);
-}
-
-#define NVME_FEAT_RM_REGPRE(v) shift(v, 1, NVME_FEAT_RM_REGPRE_MASK)
-#define NVME_FEAT_RM_RESREL(v) shift(v, 2, NVME_FEAT_RM_RESREL_MASK)
-#define NVME_FEAT_RM_RESPRE(v) shift(v, 3, NVME_FEAT_RM_RESPRE_MASK)
-
-void nvme_feature_decode_reservation_notification(__u32 value, bool *regpre, bool *resrel, bool *respre)
-{
- *regpre = NVME_FEAT_RM_REGPRE(value);
- *resrel = NVME_FEAT_RM_RESREL(value);
- *respre = NVME_FEAT_RM_RESPRE(value);
-}
-
-#define NVME_FEAT_RP_PTPL(v) shift(v, 0, NVME_FEAT_RP_PTPL_MASK)
-
-void nvme_feature_decode_reservation_persistance(__u32 value, bool *ptpl)
-{
- *ptpl = NVME_FEAT_RP_PTPL(value);
-}
-
-#define NVME_FEAT_WP_WPS(v) shift(v, 0, NVME_FEAT_WP_WPS_MASK)
-
-void nvme_feature_decode_namespace_write_protect(__u32 value, __u8 *wps)
-{
- *wps = NVME_FEAT_WP_WPS(value);
-}
s[l--] = '\0';
}
+enum {
+ NVME_FEAT_ARB_BURST_SHIFT = 0,
+ NVME_FEAT_ARB_BURST_MASK = 0x7,
+ NVME_FEAT_ARB_LPW_SHIFT = 8,
+ NVME_FEAT_ARB_LPW_MASK = 0xff,
+ NVME_FEAT_ARB_MPW_SHIFT = 16,
+ NVME_FEAT_ARB_MPW_MASK = 0xff,
+ NVME_FEAT_ARB_HPW_SHIFT = 24,
+ NVME_FEAT_ARB_HPW_MASK = 0xff,
+ NVME_FEAT_PM_PS_SHIFT = 0,
+ NVME_FEAT_PM_PS_MASK = 0x1f,
+ NVME_FEAT_PM_WH_SHIFT = 5,
+ NVME_FEAT_PM_WH_MASK = 0x7,
+ NVME_FEAT_LBAR_NR_SHIFT = 0,
+ NVME_FEAT_LBAR_NR_MASK = 0x3f,
+ NVME_FEAT_TT_TMPTH_SHIFT = 0,
+ NVME_FEAT_TT_TMPTH_MASK = 0xffff,
+ NVME_FEAT_TT_TMPSEL_SHIFT = 16,
+ NVME_FEAT_TT_TMPSEL_MASK = 0xf,
+ NVME_FEAT_TT_THSEL_SHIFT = 20,
+ NVME_FEAT_TT_THSEL_MASK = 0x3,
+ NVME_FEAT_ER_TLER_SHIFT = 0,
+ NVME_FEAT_ER_TLER_MASK = 0xffff,
+ NVME_FEAT_ER_DULBE_SHIFT = 16,
+ NVME_FEAT_ER_DULBE_MASK = 0x1,
+ NVME_FEAT_VWC_WCE_SHIFT = 0,
+ NVME_FEAT_VWC_WCE_MASK = 0x1,
+ NVME_FEAT_NRQS_NSQR_SHIFT = 0,
+ NVME_FEAT_NRQS_NSQR_MASK = 0xffff,
+ NVME_FEAT_NRQS_NCQR_SHIFT = 16,
+ NVME_FEAT_NRQS_NCQR_MASK = 0xffff,
+ NVME_FEAT_ICOAL_THR_SHIFT = 0,
+ NVME_FEAT_ICOAL_THR_MASK = 0xff,
+ NVME_FEAT_ICOAL_TIME_SHIFT = 8,
+ NVME_FEAT_ICOAL_TIME_MASK = 0xff,
+ NVME_FEAT_ICFG_IV_SHIFT = 0,
+ NVME_FEAT_ICFG_IV_MASK = 0xffff,
+ NVME_FEAT_ICFG_CD_SHIFT = 16,
+ NVME_FEAT_ICFG_CD_MASK = 0x1,
+ NVME_FEAT_WA_DN_SHIFT = 0,
+ NVME_FEAT_WA_DN_MASK = 0x1,
+ NVME_FEAT_AE_SMART_SHIFT = 0,
+ NVME_FEAT_AE_SMART_MASK = 0xff,
+ NVME_FEAT_AE_NAN_SHIFT = 8,
+ NVME_FEAT_AE_NAN_MASK = 0x1,
+ NVME_FEAT_AE_FW_SHIFT = 9,
+ NVME_FEAT_AE_FW_MASK = 0x1,
+ NVME_FEAT_AE_TELEM_SHIFT = 10,
+ NVME_FEAT_AE_TELEM_MASK = 0x1,
+ NVME_FEAT_AE_ANA_SHIFT = 11,
+ NVME_FEAT_AE_ANA_MASK = 0x1,
+ NVME_FEAT_AE_PLA_SHIFT = 12,
+ NVME_FEAT_AE_PLA_MASK = 0x1,
+ NVME_FEAT_AE_LBAS_SHIFT = 13,
+ NVME_FEAT_AE_LBAS_MASK = 0x1,
+ NVME_FEAT_AE_EGA_SHIFT = 14,
+ NVME_FEAT_AE_EGA_MASK = 0x1,
+ NVME_FEAT_APST_APSTE_SHIFT = 0,
+ NVME_FEAT_APST_APSTE_MASK = 0x1,
+ NVME_FEAT_HMEM_EHM_SHIFT = 0,
+ NVME_FEAT_HMEM_EHM_MASK = 0x1,
+ NVME_FEAT_HCTM_TMT2_SHIFT = 0,
+ NVME_FEAT_HCTM_TMT2_MASK = 0xffff,
+ NVME_FEAT_HCTM_TMT1_SHIFT = 16,
+ NVME_FEAT_HCTM_TMT1_MASK = 0xffff,
+ NVME_FEAT_NOPS_NOPPME_SHIFT = 0,
+ NVME_FEAT_NOPS_NOPPME_MASK = 0x1,
+ NVME_FEAT_RRL_RRL_SHIFT = 0,
+ NVME_FEAT_RRL_RRL_MASK = 0xff,
+ NVME_FEAT_PLM_PLME_SHIFT = 0,
+ NVME_FEAT_PLM_PLME_MASK = 0x1,
+ NVME_FEAT_PLMW_WS_SHIFT = 0,
+ NVME_FEAT_PLMW_WS_MASK = 0x7,
+ NVME_FEAT_LBAS_LSIRI_SHIFT = 0,
+ NVME_FEAT_LBAS_LSIRI_MASK = 0xffff,
+ NVME_FEAT_LBAS_LSIPI_SHIFT = 16,
+ NVME_FEAT_LBAS_LSIPI_MASK = 0xffff,
+ NVME_FEAT_SC_NODRM_SHIFT = 0,
+ NVME_FEAT_SC_NODRM_MASK = 0x1,
+ NVME_FEAT_EG_ENDGID_SHIFT = 0,
+ NVME_FEAT_EG_ENDGID_MASK = 0xffff,
+ NVME_FEAT_EG_EGCW_SHIFT = 16,
+ NVME_FEAT_EG_EGCW_MASK = 0xff,
+ NVME_FEAT_SPM_PBSLC_SHIFT = 0,
+ NVME_FEAT_SPM_PBSLC_MASK = 0xff,
+ NVME_FEAT_HOSTID_EXHID_SHIFT = 0,
+ NVME_FEAT_HOSTID_EXHID_MASK = 0x1,
+ NVME_FEAT_RM_REGPRE_SHIFT = 1,
+ NVME_FEAT_RM_REGPRE_MASK = 0x1,
+ NVME_FEAT_RM_RESREL_SHIFT = 2,
+ NVME_FEAT_RM_RESREL_MASK = 0x1,
+ NVME_FEAT_RM_RESPRE_SHIFT = 3,
+ NVME_FEAT_RM_RESPRE_MASK = 0x1,
+ NVME_FEAT_RP_PTPL_SHIFT = 0,
+ NVME_FEAT_RP_PTPL_MASK = 0x1,
+ NVME_FEAT_WP_WPS_SHIFT = 0,
+ NVME_FEAT_WP_WPS_MASK = 0x7,
+ NVME_FEAT_IOCSP_IOCSCI_SHIFT = 0,
+ NVME_FEAT_IOCSP_IOCSCI_MASK = 0xff,
+};
+
+#define NVME_FEAT_ARB_BURST(v) NVME_GET(FEAT_ARB_BURST, v)
+#define NVME_FEAT_ARB_LPW(v) NVME_GET(FEAT_ARB_LPW, v)
+#define NVME_FEAT_ARB_MPW(v) NVME_GET(FEAT_ARB_MPW, v)
+#define NVME_FEAT_ARB_HPW(v) NVME_GET(FEAT_ARB_HPW, v)
+
+static inline void nvme_feature_decode_arbitration(__u32 value, __u8 *ab, __u8 *lpw,
+	__u8 *mpw, __u8 *hpw)
+{
+	*ab = NVME_FEAT_ARB_BURST(value);
+	*lpw = NVME_FEAT_ARB_LPW(value);
+	*mpw = NVME_FEAT_ARB_MPW(value);
+	*hpw = NVME_FEAT_ARB_HPW(value);
+}
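+
+/*
+ * Usage sketch (illustrative only; 'result' stands for the completion dword 0
+ * returned by a Get Features command issued elsewhere):
+ *
+ *	__u8 ab, lpw, mpw, hpw;
+ *	nvme_feature_decode_arbitration(result, &ab, &lpw, &mpw, &hpw);
+ */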
+
+#define NVME_FEAT_PM_PS(v) NVME_GET(FEAT_PM_PS, v)
+#define NVME_FEAT_PM_WH(v) NVME_GET(FEAT_PM_WH, v)
+
+static inline void nvme_feature_decode_power_mgmt(__u32 value, __u8 *ps, __u8 *wh)
+{
+ *ps = NVME_FEAT_PM_PS(value);
+ *wh = NVME_FEAT_PM_WH(value);
+}
+
+#define NVME_FEAT_LBAR_NR(v) NVME_GET(FEAT_LBAR_NR, v)
+
+static inline void nvme_feature_decode_lba_range(__u32 value, __u8 *num)
+{
+ *num = NVME_FEAT_LBAR_NR(value);
+}
+
+#define NVME_FEAT_TT_TMPTH(v) NVME_GET(FEAT_TT_TMPTH, v)
+#define NVME_FEAT_TT_TMPSEL(v) NVME_GET(FEAT_TT_TMPSEL, v)
+#define NVME_FEAT_TT_THSEL(v) NVME_GET(FEAT_TT_THSEL, v)
+
+static inline void nvme_feature_decode_temp_threshold(__u32 value, __u16 *tmpth,
+ __u8 *tmpsel, __u8 *thsel)
+{
+ *tmpth = NVME_FEAT_TT_TMPTH(value);
+ *tmpsel = NVME_FEAT_TT_TMPSEL(value);
+ *thsel = NVME_FEAT_TT_THSEL(value);
+}
+
+#define NVME_FEAT_ER_TLER(v) NVME_GET(FEAT_ER_TLER, v)
+#define NVME_FEAT_ER_DULBE(v) NVME_GET(FEAT_ER_DULBE, v)
+
+static inline void nvme_feature_decode_error_recovery(__u32 value, __u16 *tler, bool *dulbe)
+{
+ *tler = NVME_FEAT_ER_TLER(value);
+ *dulbe = NVME_FEAT_ER_DULBE(value);
+}
+
+#define NVME_FEAT_VWC_WCE(v) NVME_GET(FEAT_VWC_WCE, v)
+
+static inline void nvme_feature_decode_volatile_write_cache(__u32 value, bool *wce)
+{
+ *wce = NVME_FEAT_VWC_WCE(value);
+}
+
+#define NVME_FEAT_NRQS_NSQR(v) NVME_GET(FEAT_NRQS_NSQR, v)
+#define NVME_FEAT_NRQS_NCQR(v) NVME_GET(FEAT_NRQS_NCQR, v)
+
+static inline void nvme_feature_decode_number_of_queues(__u32 value, __u16 *nsqr, __u16 *ncqr)
+{
+ *nsqr = NVME_FEAT_NRQS_NSQR(value);
+ *ncqr = NVME_FEAT_NRQS_NCQR(value);
+}
+
+#define NVME_FEAT_ICOAL_THR(v) NVME_GET(FEAT_ICOAL_THR, v)
+#define NVME_FEAT_ICOAL_TIME(v) NVME_GET(FEAT_ICOAL_TIME, v)
+
+static inline void nvme_feature_decode_interrupt_coalescing(__u32 value, __u8 *thr, __u8 *time)
+{
+ *thr = NVME_FEAT_ICOAL_THR(value);
+ *time = NVME_FEAT_ICOAL_TIME(value);
+}
+
+#define NVME_FEAT_ICFG_IV(v) NVME_GET(FEAT_ICFG_IV, v)
+#define NVME_FEAT_ICFG_CD(v) NVME_GET(FEAT_ICFG_CD, v)
+
+static inline void nvme_feature_decode_interrupt_config(__u32 value, __u16 *iv, bool *cd)
+{
+ *iv = NVME_FEAT_ICFG_IV(value);
+ *cd = NVME_FEAT_ICFG_CD(value);
+}
+
+#define NVME_FEAT_WA_DN(v) NVME_GET(FEAT_WA_DN, v)
+
+static inline void nvme_feature_decode_write_atomicity(__u32 value, bool *dn)
+{
+ *dn = NVME_FEAT_WA_DN(value);
+}
+
+#define NVME_FEAT_AE_SMART(v) NVME_GET(FEAT_AE_SMART, v)
+#define NVME_FEAT_AE_NAN(v) NVME_GET(FEAT_AE_NAN, v)
+#define NVME_FEAT_AE_FW(v) NVME_GET(FEAT_AE_FW, v)
+#define NVME_FEAT_AE_TELEM(v) NVME_GET(FEAT_AE_TELEM, v)
+#define NVME_FEAT_AE_ANA(v) NVME_GET(FEAT_AE_ANA, v)
+#define NVME_FEAT_AE_PLA(v) NVME_GET(FEAT_AE_PLA, v)
+#define NVME_FEAT_AE_LBAS(v) NVME_GET(FEAT_AE_LBAS, v)
+#define NVME_FEAT_AE_EGA(v) NVME_GET(FEAT_AE_EGA, v)
+
+static inline void nvme_feature_decode_async_event_config(__u32 value, __u8 *smart,
+ bool *nan, bool *fw, bool *telem, bool *ana, bool *pla, bool *lbas,
+ bool *ega)
+{
+ *smart = NVME_FEAT_AE_SMART(value);
+ *nan = NVME_FEAT_AE_NAN(value);
+ *fw = NVME_FEAT_AE_FW(value);
+ *telem = NVME_FEAT_AE_TELEM(value);
+ *ana = NVME_FEAT_AE_ANA(value);
+ *pla = NVME_FEAT_AE_PLA(value);
+ *lbas = NVME_FEAT_AE_LBAS(value);
+ *ega = NVME_FEAT_AE_EGA(value);
+}
+
+#define NVME_FEAT_APST_APSTE(v) NVME_GET(FEAT_APST_APSTE, v)
+
+static inline void nvme_feature_decode_auto_power_state(__u32 value, bool *apste)
+{
+ *apste = NVME_FEAT_APST_APSTE(value);
+}
+
+#define NVME_FEAT_HMEM_EHM(v) NVME_GET(FEAT_HMEM_EHM, v)
+
+static inline void nvme_feature_decode_host_memory_buffer(__u32 value, bool *ehm)
+{
+ *ehm = NVME_FEAT_HMEM_EHM(value);
+}
+
+#define NVME_FEAT_HCTM_TMT2(v) NVME_GET(FEAT_HCTM_TMT2, v)
+#define NVME_FEAT_HCTM_TMT1(v) NVME_GET(FEAT_HCTM_TMT1, v)
+
+static inline void nvme_feature_decode_host_thermal_mgmt(__u32 value, __u16 *tmt2, __u16 *tmt1)
+{
+ *tmt2 = NVME_FEAT_HCTM_TMT2(value);
+ *tmt1 = NVME_FEAT_HCTM_TMT1(value);
+}
+
+#define NVME_FEAT_NOPS_NOPPME(v) NVME_GET(FEAT_NOPS_NOPPME, v)
+
+static inline void nvme_feature_decode_non_op_power_config(__u32 value, bool *noppme)
+{
+ *noppme = NVME_FEAT_NOPS_NOPPME(value);
+}
+
+#define NVME_FEAT_RRL_RRL(v) NVME_GET(FEAT_RRL_RRL, v)
+
+static inline void nvme_feature_decode_read_recovery_level_config(__u32 value, __u8 *rrl)
+{
+ *rrl = NVME_FEAT_RRL_RRL(value);
+}
+
+#define NVME_FEAT_PLM_PLME(v) NVME_GET(FEAT_PLM_PLME, v)
+
+static inline void nvme_feature_decode_predictable_latency_mode_config(__u32 value, bool *plme)
+{
+ *plme = NVME_FEAT_PLM_PLME(value);
+}
+
+#define NVME_FEAT_PLMW_WS(v) NVME_GET(FEAT_PLMW_WS, v)
+
+static inline void nvme_feature_decode_predictable_latency_mode_window(__u32 value, __u8 *ws)
+{
+ *ws = NVME_FEAT_PLMW_WS(value);
+}
+
+#define NVME_FEAT_LBAS_LSIRI(v) NVME_GET(FEAT_LBAS_LSIRI, v)
+#define NVME_FEAT_LBAS_LSIPI(v) NVME_GET(FEAT_LBAS_LSIPI, v)
+
+static inline void nvme_feature_decode_lba_status_attributes(__u32 value, __u16 *lsiri, __u16 *lsipi)
+{
+ *lsiri = NVME_FEAT_LBAS_LSIRI(value);
+ *lsipi = NVME_FEAT_LBAS_LSIPI(value);
+}
+
+#define NVME_FEAT_SC_NODRM(v) NVME_GET(FEAT_SC_NODRM, v)
+
+static inline void nvme_feature_decode_sanitize_config(__u32 value, bool *nodrm)
+{
+ *nodrm = NVME_FEAT_SC_NODRM(value);
+}
+
+#define NVME_FEAT_EG_ENDGID(v) NVME_GET(FEAT_EG_ENDGID, v)
+#define NVME_FEAT_EG_EGCW(v) NVME_GET(FEAT_EG_EGCW, v)
+
+static inline void nvme_feature_decode_endurance_group_event_config(__u32 value,
+ __u16 *endgid, __u8 *endgcw)
+{
+ *endgid = NVME_FEAT_EG_ENDGID(value);
+ *endgcw = NVME_FEAT_EG_EGCW(value);
+}
+
+#define NVME_FEAT_SPM_PBSLC(v) NVME_GET(FEAT_SPM_PBSLC, v)
+
+static inline void nvme_feature_decode_software_progress_marker(__u32 value, __u8 *pbslc)
+{
+ *pbslc = NVME_FEAT_SPM_PBSLC(value);
+}
+
+#define NVME_FEAT_HOSTID_EXHID(v) NVME_GET(FEAT_HOSTID_EXHID, v)
+
+static inline void nvme_feature_decode_host_identifier(__u32 value, bool *exhid)
+{
+ *exhid = NVME_FEAT_HOSTID_EXHID(value);
+}
+
+#define NVME_FEAT_RM_REGPRE(v) NVME_GET(FEAT_RM_REGPRE, v)
+#define NVME_FEAT_RM_RESREL(v) NVME_GET(FEAT_RM_RESREL, v)
+#define NVME_FEAT_RM_RESPRE(v) NVME_GET(FEAT_RM_RESPRE, v)
+
+static inline void nvme_feature_decode_reservation_notification(__u32 value, bool *regpre,
+ bool *resrel, bool *respre)
+{
+ *regpre = NVME_FEAT_RM_REGPRE(value);
+ *resrel = NVME_FEAT_RM_RESREL(value);
+ *respre = NVME_FEAT_RM_RESPRE(value);
+}
+
+#define NVME_FEAT_RP_PTPL(v) NVME_GET(FEAT_RP_PTPL, v)
+
+static inline void nvme_feature_decode_reservation_persistance(__u32 value, bool *ptpl)
+{
+ *ptpl = NVME_FEAT_RP_PTPL(value);
+}
+
+#define NVME_FEAT_WP_WPS(v) NVME_GET(FEAT_WP_WPS, v)
+
+static inline void nvme_feature_decode_namespace_write_protect(__u32 value, __u8 *wps)
+{
+ *wps = NVME_FEAT_WP_WPS(value);
+}
#endif /* _LIBNVME_UTIL_H */