config HAVE_CALL_THUNKS
        def_bool y
-       depends on CC_HAS_ENTRY_PADDING && RETHUNK && OBJTOOL
+       depends on CC_HAS_ENTRY_PADDING && MITIGATION_RETHUNK && OBJTOOL
 
 config CALL_THUNKS
        def_bool n
          branches. Requires a compiler with -mindirect-branch=thunk-extern
          support for full protection. The kernel may run slower.
 
-config RETHUNK
+config MITIGATION_RETHUNK
        bool "Enable return-thunks"
        depends on MITIGATION_RETPOLINE && CC_HAS_RETURN_THUNK
        select OBJTOOL if HAVE_OBJTOOL
 
 config MITIGATION_UNRET_ENTRY
        bool "Enable UNRET on kernel entry"
-       depends on CPU_SUP_AMD && RETHUNK && X86_64
+       depends on CPU_SUP_AMD && MITIGATION_RETHUNK && X86_64
        default y
        help
          Compile the kernel with support for the retbleed=unret mitigation.
 
 config MITIGATION_SRSO
        bool "Mitigate speculative RAS overflow on AMD"
-       depends on CPU_SUP_AMD && X86_64 && RETHUNK
+       depends on CPU_SUP_AMD && X86_64 && MITIGATION_RETHUNK
        default y
        help
          Enable the SRSO mitigation needed on AMD Zen1-4 machines.
 
 endif
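
The Kconfig change above is what drives every other hunk in this patch: a bool symbol becomes both a preprocessor define in include/generated/autoconf.h and a CONFIG_MITIGATION_RETHUNK=y variable in the Make environment, so C code, assembly and Makefiles all key off the same renamed symbol. A minimal stand-alone sketch of that propagation, with -DCONFIG_MITIGATION_RETHUNK standing in for the generated header:

#include <stdio.h>

#ifdef CONFIG_MITIGATION_RETHUNK
static const char *ret_style = "jmp __x86_return_thunk";
#else
static const char *ret_style = "plain ret";
#endif

int main(void)
{
        printf("function returns compile to: %s\n", ret_style);
        return 0;
}
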
 RETPOLINE_CFLAGS       += $(call cc-option,-mindirect-branch-cs-prefix)
 
-ifdef CONFIG_RETHUNK
+ifdef CONFIG_MITIGATION_RETHUNK
 RETHUNK_CFLAGS         := -mfunction-return=thunk-extern
 RETPOLINE_CFLAGS       += $(RETHUNK_CFLAGS)
 endif
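
The RETHUNK_CFLAGS line is where the mitigation actually reaches the compiler: with -mfunction-return=thunk-extern, GCC stops emitting RET and instead emits a jump to an externally provided __x86_return_thunk. A hypothetical user-space demo (not kernel code) that supplies its own trivial thunk; GCC rejects the flag in combination with -fcf-protection, hence the extra option:

/* Build: gcc -O2 -fcf-protection=none -mfunction-return=thunk-extern demo.c */
#include <stdio.h>

/* Stand-in thunk; the kernel's real one lives in
 * arch/x86/lib/retpoline.S and is retargeted at boot. Hand-written
 * asm is not rewritten by the flag, so this RET stays a real RET. */
asm(".text\n"
    ".globl __x86_return_thunk\n"
    "__x86_return_thunk:\n"
    "\tret\n");

static int add(int a, int b)
{
        return a + b;           /* compiled as: jmp __x86_return_thunk */
}

int main(void)
{
        printf("%d\n", add(2, 3));
        return 0;
}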
 
 CONFIG_HZ_1000=y
 CONFIG_KEXEC=y
 CONFIG_CRASH_DUMP=y
-# CONFIG_RETHUNK is not set
+# CONFIG_MITIGATION_RETHUNK is not set
 CONFIG_HIBERNATION=y
 CONFIG_PM_DEBUG=y
 CONFIG_PM_TRACE_RTC=y
 
                                 (1 << (X86_FEATURE_RETPOLINE_LFENCE & 31)))
 #endif
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 # define DISABLE_RETHUNK       0
 #else
 # define DISABLE_RETHUNK       (1 << (X86_FEATURE_RETHUNK & 31))
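
These DISABLE_* constants implement the compile-time half of feature detection: each feature bit whose mitigation is configured out is folded into a constant disabled mask, and the cpu_feature_enabled() machinery then sees those bits as permanently clear, at no runtime cost. A compressed sketch of the idea, with a hypothetical bit number and a simplified single-word mask:

#include <stdio.h>

#define X86_FEATURE_RETHUNK     14              /* hypothetical bit */

#ifdef CONFIG_MITIGATION_RETHUNK
# define DISABLE_RETHUNK        0
#else
# define DISABLE_RETHUNK        (1u << X86_FEATURE_RETHUNK)
#endif

#define DISABLED_MASK   (DISABLE_RETHUNK /* | other DISABLE_* bits */)

static int cpu_has(unsigned int caps, unsigned int bit)
{
        return !!((caps & ~DISABLED_MASK) & (1u << bit));
}

int main(void)
{
        unsigned int caps = 1u << X86_FEATURE_RETHUNK;  /* CPU advertises it */

        printf("rethunk usable: %d\n", cpu_has(caps, X86_FEATURE_RETHUNK));
        return 0;
}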
 
 
 #ifdef __ASSEMBLY__
 
-#if defined(CONFIG_RETHUNK) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#if defined(CONFIG_MITIGATION_RETHUNK) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
 #define RET    jmp __x86_return_thunk
 #else /* !CONFIG_MITIGATION_RETHUNK */
 #ifdef CONFIG_MITIGATION_SLS
 
 #else /* __ASSEMBLY__ */
 
-#if defined(CONFIG_RETHUNK) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#if defined(CONFIG_MITIGATION_RETHUNK) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
 #define ASM_RET        "jmp __x86_return_thunk\n\t"
 #else /* !CONFIG_MITIGATION_RETHUNK */
 #ifdef CONFIG_MITIGATION_SLS
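
Both halves of this header do the same job for different consumers: RET is for .S files, ASM_RET for inline-asm strings in C, and the __DISABLE_EXPORTS/BUILD_VDSO exclusions keep code that cannot reach the kernel's thunk (decompressor, purgatory, vDSO) on a plain ret. A hypothetical stand-alone sketch of the ASM_RET side; pass -DCONFIG_MITIGATION_RETHUNK to flip the expansion:

#include <stdio.h>

#ifdef CONFIG_MITIGATION_RETHUNK
#define ASM_RET "jmp __x86_return_thunk\n\t"
asm(".text\n"
    ".globl __x86_return_thunk\n"
    "__x86_return_thunk:\n"
    "\tret\n");         /* stand-in for the kernel's patched thunk */
#else
#define ASM_RET "ret\n\t"
#endif

/* Assemble leaf() from a template so its return is exactly ASM_RET. */
asm(".text\n"
    ".globl leaf\n"
    "leaf:\n\t"
    "movl $42, %eax\n\t"
    ASM_RET);

int leaf(void);

int main(void)
{
        printf("leaf() = %d\n", leaf());
        return 0;
}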
 
  * where we have a stack but before any RET instruction.
  */
 .macro __UNTRAIN_RET ibpb_feature, call_depth_insns
-#if defined(CONFIG_RETHUNK) || defined(CONFIG_MITIGATION_IBPB_ENTRY)
+#if defined(CONFIG_MITIGATION_RETHUNK) || defined(CONFIG_MITIGATION_IBPB_ENTRY)
        VALIDATE_UNRET_END
        ALTERNATIVE_3 "",                                               \
                      CALL_UNTRAIN_RET, X86_FEATURE_UNRET,              \
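
__UNTRAIN_RET must run on the entry path before the first RET because the CPU's return predictor can be trained from user space; which untraining sequence (if any) executes is decided once at boot, when the alternatives engine patches instruction bytes in place according to the feature flags named in ALTERNATIVE_3. A loose user-space illustration of in-place code patching (emphatically not the kernel's alternatives engine; the W+X mapping may be refused on hardened systems):

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
        /* default sequence: mov eax, 1; ret */
        uint8_t code[] = { 0xb8, 0x01, 0x00, 0x00, 0x00, 0xc3 };
        void *buf = mmap(NULL, 4096, PROT_READ | PROT_WRITE | PROT_EXEC,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        int (*fn)(void);

        if (buf == MAP_FAILED)
                return 1;
        memcpy(buf, code, sizeof(code));
        fn = (int (*)(void))buf;
        printf("default: %d\n", fn());          /* 1 */

        /* "feature detected": rewrite the immediate in place */
        ((uint8_t *)buf)[1] = 0x2a;             /* now: mov eax, 42 */
        printf("patched: %d\n", fn());          /* 42 */
        return 0;
}
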
 extern retpoline_thunk_t __x86_indirect_call_thunk_array[];
 extern retpoline_thunk_t __x86_indirect_jump_thunk_array[];
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 extern void __x86_return_thunk(void);
 #else
 static inline void __x86_return_thunk(void) {}
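
The extern-or-stub pattern above keeps call sites free of #ifdefs: when the mitigation is configured out, callers see the empty static inline and the optimizer deletes the call entirely. A stand-alone sketch with hypothetical names:

#include <stdio.h>

#ifdef CONFIG_FEATURE_X
extern void feature_x_init(void);               /* real one in another TU */
#else
static inline void feature_x_init(void) {}      /* compiled-out stub */
#endif

int main(void)
{
        feature_x_init();       /* no #ifdef at the call site */
        printf("up\n");
        return 0;
}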
 
 #define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func)                      \
        __ARCH_DEFINE_STATIC_CALL_TRAMP(name, ".byte 0xe9; .long " #func " - (. + 4)")
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 #define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)                       \
        __ARCH_DEFINE_STATIC_CALL_TRAMP(name, "jmp __x86_return_thunk")
 #else
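
The ".byte 0xe9; .long " #func " - (. + 4)" trampoline above is nothing more than a hand-assembled 5-byte direct JMP: 0xe9 is the opcode, and the displacement is measured from the end of the instruction, which is what the "(. + 4)" term accounts for since "." sits just after the opcode byte. A hypothetical encoder doing the same arithmetic at runtime:

#include <stdint.h>
#include <string.h>

static void encode_jmp(uint8_t insn[5], uintptr_t site, uintptr_t target)
{
        int32_t rel = (int32_t)(target - (site + 5));

        insn[0] = 0xe9;                 /* JMP rel32 */
        memcpy(&insn[1], &rel, 4);      /* displacement, little-endian */
}

int main(void)
{
        uint8_t tramp[5];

        encode_jmp(tramp, 0x1000, 0x2000);      /* rel32 = 0xffb */
        return tramp[0] == 0xe9 ? 0 : 1;
}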
 
        }
 }
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 
 /*
  * Rewrite the compiler generated return thunk tail-calls.
 }
 #else
 void __init_or_module noinline apply_returns(s32 *start, s32 *end) { }
-#endif /* CONFIG_RETHUNK */
+#endif /* CONFIG_MITIGATION_RETHUNK */
 
 #else /* !CONFIG_MITIGATION_RETPOLINE || !CONFIG_OBJTOOL */
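
apply_returns() is the runtime counterpart of the compiler flag: objtool records every compiler-emitted "jmp __x86_return_thunk" site in a .return_sites table, and at boot each site is either retargeted at the chosen thunk or, when no thunk is needed, rewritten into a bare RET with INT3 fill. A condensed sketch of that second rewrite (the INT3 bytes trap if anything ever lands in the slack):

#include <stdint.h>
#include <string.h>

static void rewrite_return_site(uint8_t site[5])
{
        site[0] = 0xc3;                 /* RET */
        memset(&site[1], 0xcc, 4);      /* INT3 padding */
}

int main(void)
{
        uint8_t site[5] = { 0xe9, 0x00, 0x10, 0x00, 0x00 };     /* jmp rel32 */

        rewrite_return_site(site);
        return site[0] == 0xc3 ? 0 : 1;
}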
 
 
 }
 EXPORT_SYMBOL_GPL(arch_static_call_transform);
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 /*
  * This is called by apply_returns() to fix up static call trampolines,
  * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 
 #undef GEN
 #endif
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 
 /*
  * Be careful here: that label cannot really be removed because in
 SYM_CODE_END(__x86_return_thunk)
 EXPORT_SYMBOL(__x86_return_thunk)
 
-#endif /* CONFIG_RETHUNK */
+#endif /* CONFIG_MITIGATION_RETHUNK */
 
 endif
 objtool-args-$(CONFIG_UNWINDER_ORC)                    += --orc
 objtool-args-$(CONFIG_MITIGATION_RETPOLINE)            += --retpoline
-objtool-args-$(CONFIG_RETHUNK)                         += --rethunk
+objtool-args-$(CONFIG_MITIGATION_RETHUNK)              += --rethunk
 objtool-args-$(CONFIG_MITIGATION_SLS)                  += --sls
 objtool-args-$(CONFIG_STACK_VALIDATION)                        += --stackval
 objtool-args-$(CONFIG_HAVE_STATIC_CALL_INLINE)         += --static-call
 
                                 (1 << (X86_FEATURE_RETPOLINE_LFENCE & 31)))
 #endif
 
-#ifdef CONFIG_RETHUNK
+#ifdef CONFIG_MITIGATION_RETHUNK
 # define DISABLE_RETHUNK       0
 #else
 # define DISABLE_RETHUNK       (1 << (X86_FEATURE_RETHUNK & 31))
 
 
                if (insn->type == INSN_RETURN) {
                        if (opts.rethunk) {
-                               WARN_INSN(insn, "'naked' return found in RETHUNK build");
+                               WARN_INSN(insn, "'naked' return found in MITIGATION_RETHUNK build");
                        } else
                                continue;
                } else {
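
The warning string tracks the Kconfig rename because objtool's --rethunk mode (wired up in the Makefile hunk above) flags any surviving plain return: with CONFIG_MITIGATION_RETHUNK every return should have been compiled into a thunk jump. A hypothetical, heavily condensed restatement of the rule:

#include <stdio.h>

enum insn_type { INSN_OTHER, INSN_RETURN };

struct insn {
        enum insn_type type;
        const char *where;
};

static int check_returns(const struct insn *insns, int n, int rethunk)
{
        int warnings = 0;

        for (int i = 0; i < n; i++) {
                if (insns[i].type != INSN_RETURN || !rethunk)
                        continue;
                printf("%s: 'naked' return found in MITIGATION_RETHUNK build\n",
                       insns[i].where);
                warnings++;
        }
        return warnings;
}

int main(void)
{
        const struct insn prog[] = {
                { INSN_OTHER,  "func+0x0"  },
                { INSN_RETURN, "func+0x10" },   /* should have been a thunk jmp */
        };

        /* exit status mirrors the warning count, like a failed check */
        return check_returns(prog, 2, /*rethunk=*/1);
}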