                val |= CPTR_EL2_TFP | CPTR_EL2_TZ;
                __activate_traps_fpsimd32(vcpu);
        }
+       if (cpus_have_final_cap(ARM64_SME))
+               val |= CPTR_EL2_TSM;
 
        write_sysreg(val, cptr_el2);
        write_sysreg(__this_cpu_read(kvm_hyp_vector), vbar_el2);
 
+       if (cpus_have_final_cap(ARM64_SME)) {
+               val = read_sysreg_s(SYS_HFGRTR_EL2);
+               val &= ~(HFGxTR_EL2_nTPIDR2_EL0_MASK |
+                        HFGxTR_EL2_nSMPRI_EL1_MASK);
+               write_sysreg_s(val, SYS_HFGRTR_EL2);
+
+               val = read_sysreg_s(SYS_HFGWTR_EL2);
+               val &= ~(HFGxTR_EL2_nTPIDR2_EL0_MASK |
+                        HFGxTR_EL2_nSMPRI_EL1_MASK);
+               write_sysreg_s(val, SYS_HFGWTR_EL2);
+       }
+
        if (cpus_have_final_cap(ARM64_WORKAROUND_SPECULATIVE_AT)) {
                struct kvm_cpu_context *ctxt = &vcpu->arch.ctxt;
 
 
        write_sysreg(this_cpu_ptr(&kvm_init_params)->hcr_el2, hcr_el2);
 
+       if (cpus_have_final_cap(ARM64_SME)) {
+               u64 val;
+
+               val = read_sysreg_s(SYS_HFGRTR_EL2);
+               val |= HFGxTR_EL2_nTPIDR2_EL0_MASK |
+                       HFGxTR_EL2_nSMPRI_EL1_MASK;
+               write_sysreg_s(val, SYS_HFGRTR_EL2);
+
+               val = read_sysreg_s(SYS_HFGWTR_EL2);
+               val |= HFGxTR_EL2_nTPIDR2_EL0_MASK |
+                       HFGxTR_EL2_nSMPRI_EL1_MASK;
+               write_sysreg_s(val, SYS_HFGWTR_EL2);
+       }
+
        cptr = CPTR_EL2_DEFAULT;
        if (vcpu_has_sve(vcpu) && (vcpu->arch.flags & KVM_ARM64_FP_ENABLED))
                cptr |= CPTR_EL2_TZ;
+       if (cpus_have_final_cap(ARM64_SME))
+               cptr &= ~CPTR_EL2_TSM;
 
        write_sysreg(cptr, cptr_el2);
        write_sysreg(__kvm_hyp_host_vector, vbar_el2);
 
 
        val = read_sysreg(cpacr_el1);
        val |= CPACR_EL1_TTA;
-       val &= ~(CPACR_EL1_ZEN_EL0EN | CPACR_EL1_ZEN_EL1EN);
+       val &= ~(CPACR_EL1_ZEN_EL0EN | CPACR_EL1_ZEN_EL1EN |
+                CPACR_EL1_SMEN_EL0EN | CPACR_EL1_SMEN_EL1EN);
 
        /*
         * With VHE (HCR.E2H == 1), accesses to CPACR_EL1 are routed to
                __activate_traps_fpsimd32(vcpu);
        }
 
+       if (cpus_have_final_cap(ARM64_SME))
+               write_sysreg(read_sysreg(sctlr_el2) & ~SCTLR_ELx_ENTP2,
+                            sctlr_el2);
+
        write_sysreg(val, cpacr_el1);
 
        write_sysreg(__this_cpu_read(kvm_hyp_vector), vbar_el1);
         */
        asm(ALTERNATIVE("nop", "isb", ARM64_WORKAROUND_SPECULATIVE_AT));
 
+       if (cpus_have_final_cap(ARM64_SME))
+               write_sysreg(read_sysreg(sctlr_el2) | SCTLR_ELx_ENTP2,
+                            sctlr_el2);
+
        write_sysreg(CPACR_EL1_DEFAULT, cpacr_el1);
 
        if (!arm64_kernel_unmapped_at_el0())