#define ATOMIC64_OP(op, asm_op)                                                \
 __LL_SC_INLINE void                                                    \
-__LL_SC_PREFIX(arch_atomic64_##op(long i, atomic64_t *v))              \
+__LL_SC_PREFIX(arch_atomic64_##op(s64 i, atomic64_t *v))               \
 {                                                                      \
-       long result;                                                    \
+       s64 result;                                                     \
        unsigned long tmp;                                              \
                                                                        \
        asm volatile("// atomic64_" #op "\n"                            \
 __LL_SC_EXPORT(arch_atomic64_##op);
 
 #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op)         \
-__LL_SC_INLINE long                                                    \
-__LL_SC_PREFIX(arch_atomic64_##op##_return##name(long i, atomic64_t *v))\
+__LL_SC_INLINE s64                                                     \
+__LL_SC_PREFIX(arch_atomic64_##op##_return##name(s64 i, atomic64_t *v))\
 {                                                                      \
-       long result;                                                    \
+       s64 result;                                                     \
        unsigned long tmp;                                              \
                                                                        \
        asm volatile("// atomic64_" #op "_return" #name "\n"            \
 __LL_SC_EXPORT(arch_atomic64_##op##_return##name);
 
 #define ATOMIC64_FETCH_OP(name, mb, acq, rel, cl, op, asm_op)          \
-__LL_SC_INLINE long                                                    \
-__LL_SC_PREFIX(arch_atomic64_fetch_##op##name(long i, atomic64_t *v))  \
+__LL_SC_INLINE s64                                                     \
+__LL_SC_PREFIX(arch_atomic64_fetch_##op##name(s64 i, atomic64_t *v))   \
 {                                                                      \
-       long result, val;                                               \
+       s64 result, val;                                                \
        unsigned long tmp;                                              \
                                                                        \
        asm volatile("// atomic64_fetch_" #op #name "\n"                \
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
-__LL_SC_INLINE long
+__LL_SC_INLINE s64
 __LL_SC_PREFIX(arch_atomic64_dec_if_positive(atomic64_t *v))
 {
-       long result;
+       s64 result;
        unsigned long tmp;
 
        asm volatile("// atomic64_dec_if_positive\n"
 
 
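The hunks above appear to patch the LL/SC (load-/store-exclusive) implementations (presumably arch/arm64/include/asm/atomic_ll_sc.h), while the hunks below patch the LSE alternatives (presumably atomic_lse.h). Since the diff elides the asm bodies, here is a minimal, self-contained sketch of roughly what one ATOMIC64_OP instance looks like after the long -> s64 conversion. The typedefs, the sketch_ name and the exact asm body are assumptions reconstructed from the visible macro skeleton rather than the kernel's literal code, and it builds only for AArch64.

#include <stdint.h>

typedef int64_t s64;                          /* stand-in for the kernel's s64 */
typedef struct { s64 counter; } atomic64_t;

/*
 * Approximation of the ATOMIC64_OP(add, add) expansion: a load-exclusive/
 * store-exclusive retry loop. Only the operand and result types change in
 * the patch (long -> s64); the loop itself is untouched.
 */
static inline void sketch_atomic64_add(s64 i, atomic64_t *v)
{
        s64 result;
        unsigned long tmp;

        asm volatile("// atomic64_add\n"
        "       prfm    pstl1strm, %2\n"      /* prefetch the line for store   */
        "1:     ldxr    %0, %2\n"             /* result = *counter (exclusive) */
        "       add     %0, %0, %3\n"
        "       stxr    %w1, %0, %2\n"        /* try to store, tmp = status    */
        "       cbnz    %w1, 1b"              /* retry if exclusivity was lost */
        : "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
        : "Ir" (i));
}
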
 #define __LL_SC_ATOMIC64(op)   __LL_SC_CALL(arch_atomic64_##op)
 #define ATOMIC64_OP(op, asm_op)                                                \
-static inline void arch_atomic64_##op(long i, atomic64_t *v)           \
+static inline void arch_atomic64_##op(s64 i, atomic64_t *v)            \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(__LL_SC_ATOMIC64(op),        \
 #undef ATOMIC64_OP
 
 #define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...)                 \
-static inline long arch_atomic64_fetch_##op##name(long i, atomic64_t *v)\
+static inline s64 arch_atomic64_fetch_##op##name(s64 i, atomic64_t *v) \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(                             \
 #undef ATOMIC64_FETCH_OPS
 
 #define ATOMIC64_OP_ADD_RETURN(name, mb, cl...)                                \
-static inline long arch_atomic64_add_return##name(long i, atomic64_t *v)\
+static inline s64 arch_atomic64_add_return##name(s64 i, atomic64_t *v) \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(                             \
 
 #undef ATOMIC64_OP_ADD_RETURN
 
-static inline void arch_atomic64_and(long i, atomic64_t *v)
+static inline void arch_atomic64_and(s64 i, atomic64_t *v)
 {
-       register long x0 asm ("x0") = i;
+       register s64 x0 asm ("x0") = i;
        register atomic64_t *x1 asm ("x1") = v;
 
        asm volatile(ARM64_LSE_ATOMIC_INSN(
 }
 
 #define ATOMIC64_FETCH_OP_AND(name, mb, cl...)                         \
-static inline long arch_atomic64_fetch_and##name(long i, atomic64_t *v)        \
+static inline s64 arch_atomic64_fetch_and##name(s64 i, atomic64_t *v)  \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(                             \
 
 #undef ATOMIC64_FETCH_OP_AND
 
-static inline void arch_atomic64_sub(long i, atomic64_t *v)
+static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
 {
-       register long x0 asm ("x0") = i;
+       register s64 x0 asm ("x0") = i;
        register atomic64_t *x1 asm ("x1") = v;
 
        asm volatile(ARM64_LSE_ATOMIC_INSN(
 }
 
 #define ATOMIC64_OP_SUB_RETURN(name, mb, cl...)                                \
-static inline long arch_atomic64_sub_return##name(long i, atomic64_t *v)\
+static inline s64 arch_atomic64_sub_return##name(s64 i, atomic64_t *v) \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(                             \
 #undef ATOMIC64_OP_SUB_RETURN
 
 #define ATOMIC64_FETCH_OP_SUB(name, mb, cl...)                         \
-static inline long arch_atomic64_fetch_sub##name(long i, atomic64_t *v)        \
+static inline s64 arch_atomic64_fetch_sub##name(s64 i, atomic64_t *v)  \
 {                                                                      \
-       register long x0 asm ("x0") = i;                                \
+       register s64 x0 asm ("x0") = i;                                 \
        register atomic64_t *x1 asm ("x1") = v;                         \
                                                                        \
        asm volatile(ARM64_LSE_ATOMIC_INSN(                             \
 
 #undef ATOMIC64_FETCH_OP_SUB
 
-static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
+static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 {
        register long x0 asm ("x0") = (long)v;
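The last visible hunk changes only the return type of the LSE arch_atomic64_dec_if_positive; the x0 line keeps long, presumably because x0 first carries the (long-cast) pointer rather than an atomic64 value. For reference, a portable user-space sketch of the contract this primitive implements: decrement only if the result stays non-negative, and return the decremented value either way, so a negative return means no store happened. The sketch below uses GCC __atomic builtins as a stand-in for the LL/SC and LSE asm and simplifies the memory ordering; the names and types are illustrative assumptions, not the kernel's code.

#include <stdbool.h>
#include <stdint.h>

typedef int64_t s64;
typedef struct { s64 counter; } atomic64_t;

/*
 * CAS-loop illustration of atomic64_dec_if_positive() semantics. The kernel
 * versions patched above do the same with ldxr/stxr or LSE instructions and
 * are fully ordered; acq_rel is used here only to keep the sketch simple.
 */
static s64 sketch_atomic64_dec_if_positive(atomic64_t *v)
{
        s64 old = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);
        s64 dec;

        do {
                dec = old - 1;
                if (dec < 0)                  /* would go negative: leave *v alone */
                        break;
        } while (!__atomic_compare_exchange_n(&v->counter, &old, dec,
                                              true, /* weak CAS, hence the loop */
                                              __ATOMIC_ACQ_REL, __ATOMIC_RELAXED));
        return dec;
}

The point of the series appears to be keeping these arch prototypes in line with the generic atomic64_* API, which takes and returns s64; on arm64, long and s64 are both 64 bits wide, so the conversion should not change code generation.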