                val |= FIELD_PREP(ARM64_FEATURE_MASK(ID_DFR0_EL1_PerfMon),
                                  pmuver_to_perfmon(vcpu_pmuver(vcpu)));
                break;
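+       /* Hide FEAT_CCIDX from the guest; CCSIDR2 is not emulated */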
+       case SYS_ID_AA64MMFR2_EL1:
+               val &= ~ID_AA64MMFR2_EL1_CCIDX_MASK;
+               break;
+       case SYS_ID_MMFR4_EL1:
+               val &= ~ARM64_FEATURE_MASK(ID_MMFR4_EL1_CCIDX);
+               break;
        }
 
        return val;
 
        { SYS_DESC(SYS_CCSIDR_EL1), access_ccsidr },
        { SYS_DESC(SYS_CLIDR_EL1), access_clidr },
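+       /* CCSIDR2_EL1 is UNDEF without FEAT_CCIDX */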
+       { SYS_DESC(SYS_CCSIDR2_EL1), undef_access },
        { SYS_DESC(SYS_SMIDR_EL1), undef_access },
        { SYS_DESC(SYS_CSSELR_EL1), access_csselr, reset_unknown, CSSELR_EL1 },
        { SYS_DESC(SYS_CTR_EL0), access_ctr },
 
        { Op1(1), CRn( 0), CRm( 0), Op2(0), access_ccsidr },
        { Op1(1), CRn( 0), CRm( 0), Op2(1), access_clidr },
+
+       /* CCSIDR2 */
+       { Op1(1), CRn( 0), CRm( 0), Op2(2), undef_access },
+
        { Op1(2), CRn( 0), CRm( 0), Op2(0), access_csselr, NULL, CSSELR_EL1 },
 };