/*
 * PTE attribute mask for the identity-mapped kexec control page.
 * NOTE(review): the _PAGE_* flag macros are defined elsewhere (not visible
 * in this hunk) — presumably the standard x86 page-table bits from
 * arch/x86/include/asm/pgtable_types.h; confirm against that header.
 */
#define PAGE_ATTR (_PAGE_PRESENT | _PAGE_RW | _PAGE_ACCESSED | _PAGE_DIRTY)
/*
- * The .text..relocate_kernel and .data..relocate_kernel sections are copied
- * into the control page, and the remainder of the page is used as the stack.
+ * The .data..relocate_kernel section (which includes the code which gets
+ * executed) is copied into the control page, and the remainder of the page
+ * is used as the stack.
 */
-
.section .data..relocate_kernel,"a";
-/* Minimal CPU state */
+
+ /* Minimal CPU state */
/* Stack pointer and control registers preserved across the kexec jump. */
SYM_DATA_LOCAL(saved_rsp, .quad 0)
SYM_DATA_LOCAL(saved_cr0, .quad 0)
SYM_DATA_LOCAL(saved_cr3, .quad 0)
/*
 * NOTE(review): lines are elided from this hunk here — saved_cr4
 * (referenced by virtual_mapped below) and the SYM_DATA_START of
 * kexec_debug_idt are presumably in the omitted lines.
 */
.skip 0x100, 0x00
SYM_DATA_END(kexec_debug_idt)
- .section .text..relocate_kernel,"ax";
+/*
+ * This code is linked into the data section of the kernel image, is copied
+ * into the kexec control page and never invoked in place. When it runs, all
+ * other CPUs are shut down for kexec. So this code is free to use indirect
+ * branch and bare returns, and doesn't need ORC unwinding data. Keep it in
+ * a data section even in the object file, to prevent objtool from having
+ * opinions about it.
+ */
.code64
SYM_CODE_START_NOALIGN(relocate_kernel)
- UNWIND_HINT_END_OF_STACK
- ANNOTATE_NOENDBR
/*
 * NOTE(review): the objtool annotations above are dropped because the code
 * now lives in a data section, which objtool does not inspect. The
 * argument-register comment below is left unterminated and the bulk of
 * relocate_kernel's body is in lines elided from this hunk.
 */
/*
 * %rdi indirection_page
 * %rsi pa_control_page
/* jump to identity mapped page */
/* %rsi = pa_control_page + (identity_mapped - __relocate_kernel_start) */
0: addq $identity_mapped - 0b, %rsi
subq $__relocate_kernel_start - 0b, %rsi
- ANNOTATE_RETPOLINE_SAFE
jmp *%rsi
SYM_CODE_END(relocate_kernel)
SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
- UNWIND_HINT_END_OF_STACK
/*
 * NOTE(review): the register-usage comment below is truncated and most of
 * this function's body is in lines elided from this hunk.
 */
/*
 * %rdi indirection page
 * %rdx start address
/* Zero remaining registers — presumably so no stale state leaks into the
 * new kernel; elided lines likely clear %r8-%r13 too (confirm). */
xorl %r14d, %r14d
xorl %r15d, %r15d
- ANNOTATE_UNRET_SAFE
ret
/* int3 after ret: presumably a trap for stray/speculative execution */
int3
/* push the existing entry point onto the callee's stack */
pushq %rdx
- ANNOTATE_RETPOLINE_SAFE
call *%rdx
/* get the re-entry point of the peer system */
/* Find start (and end) of this physical mapping of control page */
leaq (%rip), %r8
- ANNOTATE_NOENDBR
andq $PAGE_MASK, %r8
/* Stack grows down from the top of the control page (see section comment) */
lea PAGE_SIZE(%r8), %rsp
movl $1, %r11d /* Ensure preserve_context flag is set */
/* %rax = pa_control_page + (virtual_mapped - __relocate_kernel_start),
 * then "return" to it via push+ret */
0: addq $virtual_mapped - 0b, %rax
subq $__relocate_kernel_start - 0b, %rax
pushq %rax
- ANNOTATE_UNRET_SAFE
ret
int3
SYM_CODE_END(identity_mapped)
SYM_CODE_START_LOCAL_NOALIGN(virtual_mapped)
- UNWIND_HINT_END_OF_STACK
- ANNOTATE_NOENDBR // RET target, above
/* Restore the previously saved stack pointer and CR4 (the code that
 * saved them is not visible in this hunk). */
movq saved_rsp(%rip), %rsp
movq saved_cr4(%rip), %rax
movq %rax, %cr4
/*
 * NOTE(review): elided lines presumably restore more saved state and pop
 * additional callee-saved registers before the ones visible below.
 */
popq %r12
popq %rbp
popq %rbx
- ANNOTATE_UNRET_SAFE
ret
int3
SYM_CODE_END(virtual_mapped)
/* Do the copies */
SYM_CODE_START_LOCAL_NOALIGN(swap_pages)
- UNWIND_HINT_END_OF_STACK
/*
 * NOTE(review): the register-usage comment below is truncated and the body
 * of the page-copy loop (.Lloop) is in lines elided from this hunk; only
 * the loop tail and the return are visible.
 */
/*
 * %rdi indirection page
 * %r11 preserve_context
lea PAGE_SIZE(%rax), %rsi
jmp .Lloop
.Ldone:
- ANNOTATE_UNRET_SAFE
ret
int3
SYM_CODE_END(swap_pages)
/* Debug console output: emit the character in %al on a port-I/O 8250 UART. */
#define LSR 5 /* Line Status */
SYM_CODE_START_LOCAL_NOALIGN(pr_char_8250)
- UNWIND_HINT_FUNC
- ANNOTATE_NOENDBR
/* Point %dx at the Line Status Register; stash the char in %ah */
addw $LSR, %dx
xchg %al, %ah
.Lxmtrdy_loop:
/*
 * NOTE(review): the transmit-ready poll (inb from LSR / test / loop) and
 * the re-adjustment of %dx back to the data register appear to be in
 * elided lines here.
 */
xchg %al, %ah
outb %al, %dx
/* pr_char_null: presumably a no-op character-output target for when no
 * debug console is configured — confirm against callers. */
pr_char_null:
- ANNOTATE_NOENDBR
- ANNOTATE_UNRET_SAFE
ret
SYM_CODE_END(pr_char_8250)
/* As pr_char_8250, but for an MMIO-mapped 8250 with 32-bit register stride
 * (register N lives at byte offset N*4 from the base in %rdx). */
SYM_CODE_START_LOCAL_NOALIGN(pr_char_8250_mmio32)
- UNWIND_HINT_FUNC
- ANNOTATE_NOENDBR
.Lxmtrdy_loop_mmio:
movb (LSR*4)(%rdx), %ah
testb $XMTRDY, %ah
/*
 * NOTE(review): the conditional branch back to .Lxmtrdy_loop_mmio is
 * presumably in a line elided between the test and .Lready_mmio.
 */
.Lready_mmio:
/* Data register is at offset 0: write the character */
movb %al, (%rdx)
- ANNOTATE_UNRET_SAFE
ret
SYM_CODE_END(pr_char_8250_mmio32)
/* Print the nybble in %bl, clobber %rax */
SYM_CODE_START_LOCAL_NOALIGN(pr_nybble)
- UNWIND_HINT_FUNC
movb %bl, %al
nop
/* Isolate the low nybble (0x0-0xf) */
andb $0x0f, %al
/*
 * NOTE(review): an "addb $'0', %al" appears to be elided before this
 * compare — $0x3a is ':' (one past '9'), which only makes sense after
 * the ASCII-digit bias has been added.
 */
cmpb $0x3a, %al
jb 1f
/* 0xa-0xf: skip the ASCII gap between '9' and 'a' */
addb $('a' - '0' - 10), %al
- ANNOTATE_RETPOLINE_SAFE
/* Tail-call the character-output routine pointed to by %rsi */
1: jmp *%rsi
SYM_CODE_END(pr_nybble)
/* Print the qword in %rbx as 16 hex nybbles (most significant first),
 * followed by a newline. Uses the char-output routine in %rsi. */
SYM_CODE_START_LOCAL_NOALIGN(pr_qword)
- UNWIND_HINT_FUNC
movq $16, %rcx
/* rolq by 4 rotates the current top nybble into the low bits for pr_nybble */
1: rolq $4, %rbx
call pr_nybble
loop 1b
movb $'\n', %al
- ANNOTATE_RETPOLINE_SAFE
/* Tail-call the character-output routine to emit the newline */
jmp *%rsi
SYM_CODE_END(pr_qword)
/*
 * print_reg: emit a four-character register name (\a\b\c\d) via the
 * char-output routine in %rsi, then print the value \r via pr_qword.
 * Clobbers %al and %rbx (plus whatever the char routine clobbers).
 */
.macro print_reg a, b, c, d, r
movb $\a, %al
- ANNOTATE_RETPOLINE_SAFE
call *%rsi
movb $\b, %al
- ANNOTATE_RETPOLINE_SAFE
call *%rsi
movb $\c, %al
- ANNOTATE_RETPOLINE_SAFE
call *%rsi
movb $\d, %al
- ANNOTATE_RETPOLINE_SAFE
call *%rsi
movq \r, %rbx
call pr_qword
/* NOTE(review): the .endm closing this macro is in a line elided here. */
/*
 * Fixed-slot exception vector stubs for kexec debugging. The ". ="
 * directives pin the location counter so each stub occupies exactly one
 * KEXEC_DEBUG_EXC_HANDLER_SIZE slot.
 */
SYM_CODE_START_NOALIGN(kexec_debug_exc_vectors)
/* Each of these is 6 bytes. */
.macro vec_err exc
- UNWIND_HINT_ENTRY
. = kexec_debug_exc_vectors + (\exc * KEXEC_DEBUG_EXC_HANDLER_SIZE)
nop
nop
/* NOTE(review): the push of the vector number and the jmp to exc_handler
 * for the error-code case appear to be in lines elided here. */
.endm
.macro vec_noerr exc
- UNWIND_HINT_ENTRY
. = kexec_debug_exc_vectors + (\exc * KEXEC_DEBUG_EXC_HANDLER_SIZE)
/* No hardware error code for this vector: push a dummy 0 to keep the
 * stack frame layout uniform with vec_err */
pushq $0
pushq $\exc
jmp exc_handler
.endm
- ANNOTATE_NOENDBR
vec_noerr 0 // #DE
vec_noerr 1 // #DB
vec_noerr 2 // #NMI
SYM_CODE_START_LOCAL_NOALIGN(exc_handler)
/* No need for ret mitigations during kexec */
- VALIDATE_UNRET_END
pushq %rax
pushq %rbx