 #ifndef __ASM_SH_FUTEX_IRQ_H
 #define __ASM_SH_FUTEX_IRQ_H
 
-
-static inline int atomic_futex_op_xchg_set(int oparg, u32 __user *uaddr,
-                                          int *oldval)
-{
-       unsigned long flags;
-       int ret;
-
-       local_irq_save(flags);
-
-       ret = get_user(*oldval, uaddr);
-       if (!ret)
-               ret = put_user(oparg, uaddr);
-
-       local_irq_restore(flags);
-
-       return ret;
-}
-
-static inline int atomic_futex_op_xchg_add(int oparg, u32 __user *uaddr,
-                                          int *oldval)
-{
-       unsigned long flags;
-       int ret;
-
-       local_irq_save(flags);
-
-       ret = get_user(*oldval, uaddr);
-       if (!ret)
-               ret = put_user(*oldval + oparg, uaddr);
-
-       local_irq_restore(flags);
-
-       return ret;
-}
-
-static inline int atomic_futex_op_xchg_or(int oparg, u32 __user *uaddr,
-                                         int *oldval)
-{
-       unsigned long flags;
-       int ret;
-
-       local_irq_save(flags);
-
-       ret = get_user(*oldval, uaddr);
-       if (!ret)
-               ret = put_user(*oldval | oparg, uaddr);
-
-       local_irq_restore(flags);
-
-       return ret;
-}
-
-static inline int atomic_futex_op_xchg_and(int oparg, u32 __user *uaddr,
-                                          int *oldval)
-{
-       unsigned long flags;
-       int ret;
-
-       local_irq_save(flags);
-
-       ret = get_user(*oldval, uaddr);
-       if (!ret)
-               ret = put_user(*oldval & oparg, uaddr);
-
-       local_irq_restore(flags);
-
-       return ret;
-}
-
-static inline int atomic_futex_op_xchg_xor(int oparg, u32 __user *uaddr,
-                                          int *oldval)
-{
-       unsigned long flags;
-       int ret;
-
-       local_irq_save(flags);
-
-       ret = get_user(*oldval, uaddr);
-       if (!ret)
-               ret = put_user(*oldval ^ oparg, uaddr);
-
-       local_irq_restore(flags);
-
-       return ret;
-}
-
 static inline int atomic_futex_op_cmpxchg_inatomic(u32 *uval,
                                                   u32 __user *uaddr,
                                                   u32 oldval, u32 newval)
 
--- /dev/null
+++ b/arch/sh/include/asm/futex-llsc.h
+#ifndef __ASM_SH_FUTEX_LLSC_H
+#define __ASM_SH_FUTEX_LLSC_H
+
+static inline int atomic_futex_op_cmpxchg_inatomic(u32 *uval,
+                                                  u32 __user *uaddr,
+                                                  u32 oldval, u32 newval)
+{
+       int err = 0;
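+       /*
+        * movli.l/movco.l form a load-locked/store-conditional pair on
+        * SH-4A.  If the loaded value does not match oldval, the store is
+        * skipped (bf 2f); if the conditional store loses its reservation,
+        * it clears T and "bf 1b" retries.  synco orders the access
+        * against surrounding memory operations.
+        */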
+       __asm__ __volatile__(
+               "synco\n"
+               "1:\n\t"
+               "movli.l        @%2, r0\n\t"
+               "mov    r0, %1\n\t"
+               "cmp/eq %1, %4\n\t"
+               "bf     2f\n\t"
+               "mov    %5, r0\n\t"
+               "movco.l        r0, @%2\n\t"
+               "bf     1b\n"
+               "2:\n\t"
+               "synco\n\t"
+#ifdef CONFIG_MMU
+               ".section       .fixup,\"ax\"\n"
+               "3:\n\t"
+               "mov.l  4f, %0\n\t"
+               "jmp    @%0\n\t"
+               " mov   %3, %0\n\t"
+               ".balign        4\n"
+               "4:     .long   2b\n\t"
+               ".previous\n"
+               ".section       __ex_table,\"a\"\n\t"
+               ".long  1b, 3b\n\t"
+               ".previous"
+#endif
+               :"+r" (err), "=&r" (*uval)
+               :"r" (uaddr), "i" (-EFAULT), "r" (oldval), "r" (newval)
+               :"t", "memory", "r0");
+       if (err) return err;
+       return 0;
+}
+
+#endif /* __ASM_SH_FUTEX_LLSC_H */

--- a/arch/sh/include/asm/futex.h
+++ b/arch/sh/include/asm/futex.h
 #include <linux/uaccess.h>
 #include <asm/errno.h>
 
-/* XXX: UP variants, fix for SH-4A and SMP.. */
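+/*
+ * On UP, disabling interrupts around a plain load/store is enough to make
+ * the futex ops atomic.  SMP needs a real atomic compare-and-exchange on
+ * user addresses: cas.l on J2, movli.l/movco.l (LLSC) on SH-4A.
+ */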
+#if !defined(CONFIG_SMP)
 #include <asm/futex-irq.h>
+#elif defined(CONFIG_CPU_J2)
+#include <asm/futex-cas.h>
+#elif defined(CONFIG_CPU_SH4A)
+#include <asm/futex-llsc.h>
+#else
+#error SMP not supported on this configuration.
+#endif
+
+static inline int
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+                             u32 oldval, u32 newval)
+{
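+       /* Reject addresses outside the user range before touching them. */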
+       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+               return -EFAULT;
+
+       return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
+}
 
 static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 {
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
-       int oparg = (encoded_op << 8) >> 20;
-       int cmparg = (encoded_op << 20) >> 20;
-       int oldval = 0, ret;
+       u32 oparg = (encoded_op << 8) >> 20;
+       u32 cmparg = (encoded_op << 20) >> 20;
+       u32 oldval, newval, prev;
+       int ret;
 
        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;
 
        pagefault_disable();
 
-       switch (op) {
-       case FUTEX_OP_SET:
-               ret = atomic_futex_op_xchg_set(oparg, uaddr, &oldval);
-               break;
-       case FUTEX_OP_ADD:
-               ret = atomic_futex_op_xchg_add(oparg, uaddr, &oldval);
-               break;
-       case FUTEX_OP_OR:
-               ret = atomic_futex_op_xchg_or(oparg, uaddr, &oldval);
-               break;
-       case FUTEX_OP_ANDN:
-               ret = atomic_futex_op_xchg_and(~oparg, uaddr, &oldval);
-               break;
-       case FUTEX_OP_XOR:
-               ret = atomic_futex_op_xchg_xor(oparg, uaddr, &oldval);
-               break;
-       default:
-               ret = -ENOSYS;
-               break;
-       }
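+       /*
+        * Every op is a read, an arithmetic/logic step and a cmpxchg that
+        * only succeeds if the word still holds the value that was read;
+        * retry whenever another writer got in between.
+        */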
+       do {
+               if (op == FUTEX_OP_SET)
+                       ret = oldval = 0;
+               else
+                       ret = get_user(oldval, uaddr);
+
+               if (ret) break;
+
+               switch (op) {
+               case FUTEX_OP_SET:
+                       newval = oparg;
+                       break;
+               case FUTEX_OP_ADD:
+                       newval = oldval + oparg;
+                       break;
+               case FUTEX_OP_OR:
+                       newval = oldval | oparg;
+                       break;
+               case FUTEX_OP_ANDN:
+                       newval = oldval & ~oparg;
+                       break;
+               case FUTEX_OP_XOR:
+                       newval = oldval ^ oparg;
+                       break;
+               default:
+                       ret = -ENOSYS;
+                       break;
+               }
+
+               if (ret) break;
+
+               ret = futex_atomic_cmpxchg_inatomic(&prev, uaddr, oldval, newval);
+       } while (!ret && prev != oldval);
 
        pagefault_enable();
 
        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
-               case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
-               case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
-               case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
-               case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+               case FUTEX_OP_CMP_LT: ret = ((int)oldval < (int)cmparg); break;
+               case FUTEX_OP_CMP_GE: ret = ((int)oldval >= (int)cmparg); break;
+               case FUTEX_OP_CMP_LE: ret = ((int)oldval <= (int)cmparg); break;
+               case FUTEX_OP_CMP_GT: ret = ((int)oldval > (int)cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
-                             u32 oldval, u32 newval)
-{
-       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
-               return -EFAULT;
-
-       return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
-}
-
 #endif /* __KERNEL__ */
 #endif /* __ASM_SH_FUTEX_H */