* Ensure each lock is in a separate cacheline.
  */
 static union {
-       raw_spinlock_t lock;
+       arch_spinlock_t lock;
        char pad[L1_CACHE_BYTES];
 } atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
        [0 ... (NR_LOCKS - 1)] = {
-               .lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
+               .lock =  __ARCH_SPIN_LOCK_UNLOCKED,
        },
 };
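The pad member forces each array slot to span a full cacheline, so CPUs spinning on different locks never bounce the same line. If one wanted to make that assumption explicit, a build-time check along these lines would do (illustrative only, not part of the patch):

	/* Illustrative: each slot must fill exactly one cacheline. */
	static_assert(sizeof(atomic64_lock[0]) == L1_CACHE_BYTES,
		      "atomic64_lock slots must be cacheline-sized");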
 
-static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
+static inline arch_spinlock_t *lock_addr(const atomic64_t *v)
 {
        unsigned long addr = (unsigned long) v;
 
        addr >>= L1_CACHE_SHIFT;
        addr ^= (addr >> 8) ^ (addr >> 16);
        return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
 }
 
 s64 generic_atomic64_read(const atomic64_t *v)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
        s64 val;
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        val = v->counter;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
        return val;
 }
 EXPORT_SYMBOL(generic_atomic64_read);
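This is the pattern the whole patch repeats: raw_spin_lock_irqsave() is split into an explicit local_irq_save() plus arch_spin_lock(). arch_spinlock_t is the bare architecture-level lock underneath raw_spinlock_t; it carries no lockdep or debug instrumentation and does not manage IRQ state itself, so the irqsave/irqrestore pair has to be open-coded around it. Roughly, on a typical SMP build (a sketch, not the exact expansion):

	/*
	 * raw_spin_lock_irqsave(lock, flags);
	 *   ~= local_irq_save(flags);
	 *      preempt_disable();
	 *      spin_acquire(&lock->dep_map, ...);   (lockdep hook)
	 *      arch_spin_lock(&lock->raw_lock);
	 *
	 * The open-coded form keeps the IRQ disabling (which also stops
	 * preemption on the local CPU) and sheds the lockdep/debug layers.
	 */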
 void generic_atomic64_set(atomic64_t *v, s64 i)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        v->counter = i;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
 }
 EXPORT_SYMBOL(generic_atomic64_set);
 
 #define ATOMIC64_OP(op, c_op)                                           \
 void generic_atomic64_##op(s64 a, atomic64_t *v)                       \
 {                                                                      \
        unsigned long flags;                                            \
-       raw_spinlock_t *lock = lock_addr(v);                            \
+       arch_spinlock_t *lock = lock_addr(v);                           \
                                                                        \
-       raw_spin_lock_irqsave(lock, flags);                             \
+       local_irq_save(flags);                                          \
+       arch_spin_lock(lock);                                           \
        v->counter c_op a;                                              \
-       raw_spin_unlock_irqrestore(lock, flags);                        \
+       arch_spin_unlock(lock);                                         \
+       local_irq_restore(flags);                                       \
 }                                                                      \
 EXPORT_SYMBOL(generic_atomic64_##op);
 
 #define ATOMIC64_OP_RETURN(op, c_op)                                    \
 s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v)               \
 {                                                                      \
        unsigned long flags;                                            \
-       raw_spinlock_t *lock = lock_addr(v);                            \
+       arch_spinlock_t *lock = lock_addr(v);                           \
        s64 val;                                                        \
                                                                        \
-       raw_spin_lock_irqsave(lock, flags);                             \
+       local_irq_save(flags);                                          \
+       arch_spin_lock(lock);                                           \
        val = (v->counter c_op a);                                      \
-       raw_spin_unlock_irqrestore(lock, flags);                        \
+       arch_spin_unlock(lock);                                         \
+       local_irq_restore(flags);                                       \
        return val;                                                     \
 }                                                                      \
 EXPORT_SYMBOL(generic_atomic64_##op##_return);
 
 #define ATOMIC64_FETCH_OP(op, c_op)                                     \
 s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v)                  \
 {                                                                      \
        unsigned long flags;                                            \
-       raw_spinlock_t *lock = lock_addr(v);                            \
+       arch_spinlock_t *lock = lock_addr(v);                           \
        s64 val;                                                        \
                                                                        \
-       raw_spin_lock_irqsave(lock, flags);                             \
+       local_irq_save(flags);                                          \
+       arch_spin_lock(lock);                                           \
        val = v->counter;                                               \
        v->counter c_op a;                                              \
-       raw_spin_unlock_irqrestore(lock, flags);                        \
+       arch_spin_unlock(lock);                                         \
+       local_irq_restore(flags);                                       \
        return val;                                                     \
 }                                                                      \
 EXPORT_SYMBOL(generic_atomic64_fetch_##op);
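Further down the file (unchanged by this patch, so elided here) these three templates are stamped out for the concrete operations, roughly:

	#define ATOMIC64_OPS(op, c_op)		\
		ATOMIC64_OP(op, c_op)		\
		ATOMIC64_OP_RETURN(op, c_op)	\
		ATOMIC64_FETCH_OP(op, c_op)

	ATOMIC64_OPS(add, +=)
	ATOMIC64_OPS(sub, -=)

with the bitwise ops (and, or, xor) instantiating only ATOMIC64_OP and ATOMIC64_FETCH_OP.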
 s64 generic_atomic64_dec_if_positive(atomic64_t *v)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
        s64 val;
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        val = v->counter - 1;
        if (val >= 0)
                v->counter = val;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
        return val;
 }
 EXPORT_SYMBOL(generic_atomic64_dec_if_positive);
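Note that generic_atomic64_dec_if_positive() returns the decremented value even when it went negative and the store was skipped, so callers test the result rather than the counter. A hypothetical caller (names invented here):

	/* Claim one unit from a counting pool, if any are left. */
	s64 left = generic_atomic64_dec_if_positive(&pool->available);

	if (left < 0)
		return -EBUSY;	/* counter was already 0; left untouched */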
 s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
        s64 val;
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        val = v->counter;
        if (val == o)
                v->counter = n;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
        return val;
 }
 EXPORT_SYMBOL(generic_atomic64_cmpxchg);
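Because generic_atomic64_cmpxchg() returns the value it observed, derived operations follow the usual compare-and-swap retry loop. A sketch (illustrative, not from this file; assumes a non-negative increment):

	/* Saturating 64-bit add built on the cmpxchg primitive. */
	static s64 atomic64_add_sat(atomic64_t *v, s64 a)
	{
		s64 old, new;

		do {
			old = generic_atomic64_read(v);
			new = (old > S64_MAX - a) ? S64_MAX : old + a;
		} while (generic_atomic64_cmpxchg(v, old, new) != old);

		return new;
	}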
 s64 generic_atomic64_xchg(atomic64_t *v, s64 new)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
        s64 val;
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        val = v->counter;
        v->counter = new;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
        return val;
 }
 EXPORT_SYMBOL(generic_atomic64_xchg);
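The unconditional exchange is typically used to hand off an accumulated value in one shot (hypothetical field name):

	/* Consume the pending byte count, resetting it to zero. */
	s64 pending = generic_atomic64_xchg(&stats->pending_bytes, 0);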
 s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
 {
        unsigned long flags;
-       raw_spinlock_t *lock = lock_addr(v);
+       arch_spinlock_t *lock = lock_addr(v);
        s64 val;
 
-       raw_spin_lock_irqsave(lock, flags);
+       local_irq_save(flags);
+       arch_spin_lock(lock);
        val = v->counter;
        if (val != u)
                v->counter += a;
-       raw_spin_unlock_irqrestore(lock, flags);
+       arch_spin_unlock(lock);
+       local_irq_restore(flags);
 
        return val;
 }
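This primitive underlies atomic64_add_unless() and atomic64_inc_not_zero(): the old value comes back, so the caller can tell whether the add actually happened. For example (hypothetical refcount field):

	/* Take a reference only while the object is still live. */
	if (generic_atomic64_fetch_add_unless(&obj->refs, 1, 0) == 0)
		return NULL;	/* refcount already hit 0; no ref taken */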