dtrace: support x86 CPUs with SMAP
author     Nick Alcock <nick.alcock@oracle.com>
           Tue, 30 May 2017 12:51:48 +0000 (13:51 +0100)
committer  Nick Alcock <nick.alcock@oracle.com>
           Fri, 2 Jun 2017 12:05:24 +0000 (13:05 +0100)
We need to execute STAC and CLAC at the appropriate points, or the CPU
will fault us for accessing userspace without permission.

Signed-off-by: Nick Alcock <nick.alcock@oracle.com>
Acked-by: Kris Van Hees <kris.van.hees@oracle.com>
Orabug: 26166784
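
For context: ASM_STAC and ASM_CLAC come from <asm/smap.h> and are not
unconditional instructions. Via the kernel's ALTERNATIVE mechanism they
are patched into real STAC/CLAC at boot only on CPUs that advertise
SMAP, and remain NOPs otherwise, so the same binary still runs on
pre-SMAP hardware. A simplified sketch of the definitions from this era
(the exact header contents vary between kernel versions):

    #ifdef CONFIG_X86_SMAP
    /* Patched at boot: becomes a real STAC/CLAC only if the CPU has SMAP. */
    # define ASM_STAC ALTERNATIVE "", __stringify(__ASM_STAC), X86_FEATURE_SMAP
    # define ASM_CLAC ALTERNATIVE "", __stringify(__ASM_CLAC), X86_FEATURE_SMAP
    #else
    # define ASM_STAC
    # define ASM_CLAC
    #endif

STAC sets EFLAGS.AC, which temporarily permits supervisor-mode reads and
writes of user pages; CLAC clears it again. The patch below brackets
every userspace access in the DTrace assembly helpers with this pair, so
the window in which SMAP is disarmed stays as narrow as possible.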

dtrace/dtrace_asm_x86_64.S

index e84190d364f13a32685df7096341809e72917278..c8b6977d8ea1609d37e42a5ff004c85dd20a87f7 100644
  * CDDL HEADER END
  */
 /*
- * Copyright 2010-2014 Oracle, Inc.  All rights reserved.
+ * Copyright 2010-2017 Oracle, Inc.  All rights reserved.
  * Use is subject to license terms.
  */
 
 #include <linux/linkage.h>
+#include <asm/smap.h>
 
 #define CPU_DTRACE_BADADDR     0x0004  /* DTrace fault: bad address */
 
        pushq   %rbp
        movq    %rsp, %rbp
 
+       ASM_STAC                        # set EFLAGS.AC: allow user accesses
        xchgq   %rdi, %rsi              # make %rsi source, %rdi dest
        movq    %rdx, %rcx              # load count
        repz                            # repeat for count ...
        smovb                           #   move from %ds:rsi to %es:rdi
+       ASM_CLAC                        # clear EFLAGS.AC: re-arm SMAP
        leave
        ret
        ENDPROC(dtrace_copy)
        pushq   %rbp
        movq    %rsp, %rbp
 
+       ASM_STAC
 0:
        movb    (%rdi), %al             # load from source
        movb    %al, (%rsi)             # store to destination
        cmpq    $0, %rdx
        jne     0b
 2:
+       ASM_CLAC
        leave
        ret
 
 #if defined(__x86_64__)
 
        ENTRY(dtrace_fulword)
+       ASM_STAC
        movq    (%rdi), %rax
+       ASM_CLAC
        ret
        ENDPROC(dtrace_fulword)
 
 
        ENTRY(dtrace_fuword8_nocheck)
        xorq    %rax, %rax
+       ASM_STAC
        movb    (%rdi), %al
+       ASM_CLAC
        ret
        ENDPROC(dtrace_fuword8_nocheck)
 
 
        ENTRY(dtrace_fuword16_nocheck)
        xorq    %rax, %rax
+       ASM_STAC
        movw    (%rdi), %ax
+       ASM_CLAC
        ret
        ENDPROC(dtrace_fuword16_nocheck)
 
 
        ENTRY(dtrace_fuword32_nocheck)
        xorq    %rax, %rax
+       ASM_STAC
        movl    (%rdi), %eax
+       ASM_CLAC
        ret
        ENDPROC(dtrace_fuword32_nocheck)
 
 #if defined(__x86_64__)
 
        ENTRY(dtrace_fuword64_nocheck)
+       ASM_STAC
        movq    (%rdi), %rax
+       ASM_CLAC
        ret
        ENDPROC(dtrace_fuword64_nocheck)
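
Taken together, each primitive now opens and closes the SMAP window
around the single access it performs. For example, dtrace_fuword64_nocheck
as it reads after this patch (reassembled from the hunk above; the
comments are added here for illustration):

    ENTRY(dtrace_fuword64_nocheck)
    ASM_STAC                        # set EFLAGS.AC: allow the user load
    movq    (%rdi), %rax            # fetch 64 bits from the user address
    ASM_CLAC                        # clear EFLAGS.AC: re-arm SMAP
    ret
    ENDPROC(dtrace_fuword64_nocheck)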