/* TSC emulation after migration */
 #define HV_X64_MSR_REENLIGHTENMENT_CONTROL     0x40000106
 
+/* Nested features (CPUID 0x4000000A) EAX */
+#define HV_X64_NESTED_MSR_BITMAP               BIT(19)
+
 struct hv_reenlightenment_control {
        __u64 vector:8;
        __u64 reserved1:8;
        u32 hv_clean_fields;
        u32 hv_padding_32;
        u32 hv_synthetic_controls;
-       u32 hv_enlightenments_control;
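+       /* Per-feature opt-in bits for hypervisor enlightenments */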
+       struct {
+               u32 nested_flush_hypercall:1;
+               u32 msr_bitmap:1;
+               u32 reserved:30;
+       } hv_enlightenments_control;
        u32 hv_vp_id;
 
        u64 hv_vm_id;
 
        return *(u16 *)((char *)current_evmcs + offset);
 }
 
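+/*
+ * The eVMCS 'clean fields' mask tells the underlying hypervisor which
+ * fields it may reuse from its cache on the next VM-entry.  Whenever an
+ * MSR bitmap shared through the enlightened MSR-Bitmap feature is about
+ * to change, the corresponding clean bit must be cleared so the
+ * hypervisor rescans the bitmap.
+ */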
+static inline void evmcs_touch_msr_bitmap(void)
+{
+       if (unlikely(!current_evmcs))
+               return;
+
+       if (current_evmcs->hv_enlightenments_control.msr_bitmap)
+               current_evmcs->hv_clean_fields &=
+                       ~HV_VMX_ENLIGHTENED_CLEAN_FIELD_MSR_BITMAP;
+}
+
 static void evmcs_load(u64 phys_addr)
 {
        struct hv_vp_assist_page *vp_ap =
 static inline u16 evmcs_read16(unsigned long field) { return 0; }
 static inline void evmcs_load(u64 phys_addr) {}
 static inline void evmcs_sanitize_exec_ctrls(struct vmcs_config *vmcs_conf) {}
+static inline void evmcs_touch_msr_bitmap(void) {}
 #endif /* IS_ENABLED(CONFIG_HYPERV) */
 
 static inline bool is_exception_n(u32 intr_info, u8 vector)
                if (!loaded_vmcs->msr_bitmap)
                        goto out_vmcs;
                memset(loaded_vmcs->msr_bitmap, 0xff, PAGE_SIZE);
+
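+               /*
+                * Opt in to the enlightened MSR bitmap when the underlying
+                * hypervisor advertises it; the hypervisor may then cache
+                * the bitmap until evmcs_touch_msr_bitmap() marks it dirty.
+                */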
+               if (IS_ENABLED(CONFIG_HYPERV) &&
+                   static_branch_unlikely(&enable_evmcs) &&
+                   (ms_hyperv.nested_features & HV_X64_NESTED_MSR_BITMAP)) {
+                       struct hv_enlightened_vmcs *evmcs =
+                               (struct hv_enlightened_vmcs *)loaded_vmcs->vmcs;
+
+                       evmcs->hv_enlightenments_control.msr_bitmap = 1;
+               }
        }
        return 0;
 
        if (!cpu_has_vmx_msr_bitmap())
                return;
 
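+       /*
+        * The bitmap is about to be modified; if it is shared through the
+        * enlightened VMCS, invalidate the hypervisor's cached copy first.
+        */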
+       if (static_branch_unlikely(&enable_evmcs))
+               evmcs_touch_msr_bitmap();
+
        /*
         * See Intel PRM Vol. 3, 20.6.9 (MSR-Bitmap Address). Early manuals
         * have the write-low and read-high bitmap offsets the wrong way round.
        if (!cpu_has_vmx_msr_bitmap())
                return;
 
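+       /* As above: mark the shared MSR bitmap dirty before modifying it. */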
+       if (static_branch_unlikely(&enable_evmcs))
+               evmcs_touch_msr_bitmap();
+
        /*
         * See Intel PRM Vol. 3, 20.6.9 (MSR-Bitmap Address). Early manuals
         * have the write-low and read-high bitmap offsets the wrong way round.