From: Marcelo Tosatti
Date: Mon, 20 Mar 2023 18:03:35 +0000 (-0300)
Subject: this_cpu_cmpxchg: loongarch: switch this_cpu_cmpxchg to locked, add _local function
X-Git-Url: https://www.infradead.org/git/?a=commitdiff_plain;h=5d681e19c5f1f5bf5f1f78080b61cecdfeefe126;p=users%2Fjedix%2Flinux-maple.git

this_cpu_cmpxchg: loongarch: switch this_cpu_cmpxchg to locked, add _local function

The goal is to have vmstat_shepherd transfer from per-CPU counters to
global counters remotely.  For this, an atomic this_cpu_cmpxchg is
necessary.

Following the kernel convention for cmpxchg/cmpxchg_local, add
this_cpu_cmpxchg_local helpers to LoongArch.

Link: https://lkml.kernel.org/r/20230320180745.607294360@redhat.com
Signed-off-by: Marcelo Tosatti
Cc: Aaron Tomlin
Cc: Christoph Lameter
Cc: Frederic Weisbecker
Cc: Heiko Carstens
Cc: Huacai Chen
Cc: Michal Hocko
Cc: Peter Xu
Cc: "Russell King (Oracle)"
Cc: Vlastimil Babka
Signed-off-by: Andrew Morton
---

diff --git a/arch/loongarch/include/asm/percpu.h b/arch/loongarch/include/asm/percpu.h
index ad8d88494554..901727df7cbe 100644
--- a/arch/loongarch/include/asm/percpu.h
+++ b/arch/loongarch/include/asm/percpu.h
@@ -150,6 +150,16 @@ static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
 }
 
 /* this_cpu_cmpxchg */
+#define _protect_cmpxchg(pcp, o, n)				\
+({								\
+	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
+	preempt_disable_notrace();				\
+	__ret = cmpxchg(raw_cpu_ptr(&(pcp)), o, n);		\
+	preempt_enable_notrace();				\
+	__ret;							\
+})
+
+/* this_cpu_cmpxchg_local */
 #define _protect_cmpxchg_local(pcp, o, n)			\
 ({								\
 	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
@@ -222,10 +232,15 @@ do {									\
 #define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
 #define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)
 
-#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_local_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_local_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_local_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_local_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+
+#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg(ptr, o, n)
+#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg(ptr, o, n)
+#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg(ptr, o, n)
+#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg(ptr, o, n)
 
 #include <asm-generic/percpu.h>
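
A rough usage sketch (not part of the patch) of why the locked/_local
distinction matters, using hypothetical names (my_counter,
my_counter_inc, my_counter_flush, my_counter_take): the owning CPU
updates its own counter cheaply, while a housekeeping thread in the
spirit of vmstat_shepherd folds remote per-CPU counts into a global
total.  Because the flush side uses a full cmpxchg() on another CPU's
data, any local read-modify-write that can race with it must also use
the locked this_cpu_cmpxchg() rather than the _local variant.

#include <linux/percpu.h>
#include <linux/atomic.h>
#include <linux/compiler.h>

static DEFINE_PER_CPU(int, my_counter);

/* Hot path, runs on the owning CPU: cheap per-CPU increment. */
static void my_counter_inc(void)
{
	this_cpu_inc(my_counter);
}

/*
 * Housekeeping path, may run on any CPU: atomically steal the pending
 * count of @cpu and return it so the caller can add it to a global
 * counter.  A full cmpxchg() is required because the owning CPU may
 * be updating the counter concurrently.
 */
static int my_counter_flush(int cpu)
{
	int *p = per_cpu_ptr(&my_counter, cpu);
	int old;

	do {
		old = READ_ONCE(*p);
	} while (cmpxchg(p, old, 0) != old);

	return old;
}

/*
 * Local read-modify-write that can race with my_counter_flush(): uses
 * the locked this_cpu_cmpxchg(), which this patch maps to
 * _protect_cmpxchg() on LoongArch.  Counters that are never touched
 * remotely can keep the cheaper this_cpu_cmpxchg_local().
 */
static bool my_counter_take(int limit)
{
	int old = this_cpu_read(my_counter);

	return old >= limit && this_cpu_cmpxchg(my_counter, old, 0) == old;
}

The design choice mirrors the kernel's cmpxchg()/cmpxchg_local() split:
callers that only race with themselves keep the cheaper _local form,
while callers whose per-CPU data may be modified from another CPU get
full atomicity.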