pushq %rdx; \
movl $MSR_IA32_SPEC_CTRL, %ecx; \
movl $0, %edx; \
- movl x86_spec_ctrl_base, %eax; \
+ movl PER_CPU_VAR(x86_spec_ctrl_restore), %eax; \
wrmsr; \
popq %rdx; \
popq %rcx; \
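
This first hunk changes what the kernel-exit macro loads back into MSR_IA32_SPEC_CTRL: instead of the global x86_spec_ctrl_base, it reads the new per-CPU x86_spec_ctrl_restore, so each CPU re-establishes the SPEC_CTRL value most recently recorded for it, including a task's SSBD bit (the final hunk below shows where that value is recorded). A C-level sketch of the sequence, assuming the stock this_cpu_read()/native_wrmsr() helpers; the function name is hypothetical:

/*
 * Sketch only; the real code is the asm macro above, which also
 * preserves %rcx/%rdx around the wrmsr.
 */
static __always_inline void spec_ctrl_restore_exit(void)
{
	u64 val = this_cpu_read(x86_spec_ctrl_restore);

	/* SPEC_CTRL bits fit in the low 32 bits; the asm zeroes %edx. */
	native_wrmsr(MSR_IA32_SPEC_CTRL, (u32)val, 0);
}
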
testl $SPEC_CTRL_IBRS_INUSE, PER_CPU_VAR(cpu_ibrs)
jz 13f
- testl $SPEC_CTRL_FEATURE_ENABLE_IBRS, \save_reg
- jnz 13f
+ cmp \save_reg, PER_CPU_VAR(x86_spec_ctrl_priv_cpu)
+ je 13f
movl $MSR_IA32_SPEC_CTRL, %ecx
movl $0, %edx
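
On the entry side, the old code skipped the privileged MSR write whenever the IBRS bit was already set in the saved SPEC_CTRL value. The new code instead compares the whole saved value against the per-CPU x86_spec_ctrl_priv_cpu and branches past the write only on an exact match, so a stale SSBD bit still forces the update while a redundant (and expensive) wrmsr is avoided. The excerpt cuts off before the wrmsr itself; roughly, in C, where saved_spec_ctrl stands in for the \save_reg macro argument and the written value is an assumption:

/* Hedged sketch of the new entry-path check; names mirror the asm. */
static __always_inline void spec_ctrl_enter_check(u64 saved_spec_ctrl)
{
	if (!(this_cpu_read(cpu_ibrs) & SPEC_CTRL_IBRS_INUSE))
		return;		/* jz 13f: IBRS not in use */

	if (saved_spec_ctrl == this_cpu_read(x86_spec_ctrl_priv_cpu))
		return;		/* je 13f: MSR already holds the right value */

	native_wrmsr(MSR_IA32_SPEC_CTRL,
		     (u32)this_cpu_read(x86_spec_ctrl_priv_cpu), 0);
}

The header then declares the new per-CPU variable alongside the existing ones:
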
/* Defined in bugs_64.c */
extern u64 x86_spec_ctrl_priv;
DECLARE_PER_CPU(u64, x86_spec_ctrl_priv_cpu);
+DECLARE_PER_CPU(u64, x86_spec_ctrl_restore);
extern u64 x86_spec_ctrl_base;
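
Two inline helpers keep the per-CPU copies current; the next hunk extends update_cpu_spec_ctrl() so it seeds the restore value from x86_spec_ctrl_base at the same time it refreshes the privileged copy:
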
static inline void update_cpu_spec_ctrl(int cpu)
{
per_cpu(x86_spec_ctrl_priv_cpu, cpu) = x86_spec_ctrl_priv;
+ per_cpu(x86_spec_ctrl_restore, cpu) = x86_spec_ctrl_base;
}
static inline void update_cpu_spec_ctrl_all(void)
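
The excerpt truncates update_cpu_spec_ctrl_all(); a plausible body, assuming it follows the usual refresh-all-CPUs pattern, would be:

/* Hedged guess at the truncated body. */
static inline void update_cpu_spec_ctrl_all(void)
{
	int cpu;

	for_each_possible_cpu(cpu)
		update_cpu_spec_ctrl(cpu);
}

In bugs_64.c the new variable is then defined and exported next to the existing privileged copy:
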
EXPORT_SYMBOL_GPL(x86_spec_ctrl_priv);
DEFINE_PER_CPU(u64, x86_spec_ctrl_priv_cpu) = 0;
EXPORT_PER_CPU_SYMBOL(x86_spec_ctrl_priv_cpu);
+DEFINE_PER_CPU(u64, x86_spec_ctrl_restore) = 0;
+EXPORT_PER_CPU_SYMBOL(x86_spec_ctrl_restore);
/*
* AMD specific MSR info for Speculative Store Bypass control.
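
Both per-CPU copies start at 0 and are expected to be populated by update_cpu_spec_ctrl() before the exit path reads them. The truncated comment above marks the Speculative Store Bypass code that the final hunk touches: whenever a task's TIF-derived SSBD state is folded into SPEC_CTRL (typically on context switch), the new value is recorded before being written:
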
{
u64 msr = x86_spec_ctrl_base | ssbd_tif_to_spec_ctrl(tifn);
+ this_cpu_write(x86_spec_ctrl_restore, msr);
wrmsrl(MSR_IA32_SPEC_CTRL, msr);
}
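
This is what makes the per-CPU restore value in the first hunk accurate: the exit path no longer re-imposes the bare x86_spec_ctrl_base but whatever base-plus-SSBD value was last computed for the running task. A hypothetical wrapper spelling out the pattern (the patch open-codes it instead):

/*
 * Hypothetical helper for the update above: record the value that
 * kernel exit should re-establish, then program the MSR.
 */
static inline void spec_ctrl_write_and_record(u64 val)
{
	this_cpu_write(x86_spec_ctrl_restore, val);
	wrmsrl(MSR_IA32_SPEC_CTRL, val);
}
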