.quad sys_ni_syscall            /* pselect6 for now */
        .quad sys_ni_syscall            /* ppoll for now */
        .quad sys_unshare               /* 310 */
+       .quad compat_sys_set_robust_list
+       .quad compat_sys_get_robust_list
 ia32_syscall_end:              
        .rept IA32_NR_syscalls-(ia32_syscall_end-ia32_sys_call_table)/8
                .quad ni_syscall
 
 static inline int
 futex_atomic_cmpxchg_inuser(int __user *uaddr, int oldval, int newval)
 {
-       return -ENOSYS;
+       /*
+        * Atomic compare-and-exchange on a user-space word: if *uaddr ==
+        * oldval, store newval.  Returns the value previously found at
+        * *uaddr (== oldval on success), or -EFAULT if the user address
+        * is invalid or faults.
+        */
+       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+               return -EFAULT;
+
+       __asm__ __volatile__(
+               /* 1: the (potentially faulting) instruction.  cmpxchgl
+                * compares %eax (%0, oldval) with %1 (*uaddr) and stores
+                * %3 (newval) on match; the old memory value ends up in
+                * %eax either way. */
+               "1:     " LOCK_PREFIX "cmpxchgl %3, %1          \n"
+
+               /* NOTE: label 2 is emitted BEFORE the section switch, so
+                * it marks the main instruction stream right after the
+                * cmpxchg; the fixup below resumes execution there. */
+               "2:     .section .fixup, \"ax\"                 \n"
+               /* Fault fixup: load -EFAULT (immediate %2) into %0. */
+               "3:     mov     %2, %0                          \n"
+               "       jmp     2b                              \n"
+               "       .previous                               \n"
+
+               /* Exception table: a fault at 1b is fixed up at 3b
+                * (8-byte .quad entries on x86-64). */
+               "       .section __ex_table, \"a\"              \n"
+               "       .align  8                               \n"
+               "       .quad   1b,3b                           \n"
+               "       .previous                               \n"
+
+               : "=a" (oldval), "=m" (*uaddr)  /* cmpxchg requires %eax */
+               : "i" (-EFAULT), "r" (newval), "0" (oldval)
+               : "memory"                      /* *uaddr may be written */
+       );
+
+       /* Old *uaddr value, or -EFAULT patched in by the fixup path. */
+       return oldval;
 }
 
 #endif
 
 __SYSCALL(__NR_ppoll,  sys_ni_syscall)         /* for now */
 #define __NR_unshare           272
 __SYSCALL(__NR_unshare,        sys_unshare)
+/* Robust-futex list registration syscalls (numbers 273/274). */
+#define __NR_set_robust_list   273
+__SYSCALL(__NR_set_robust_list, sys_set_robust_list)
+#define __NR_get_robust_list   274
+__SYSCALL(__NR_get_robust_list, sys_get_robust_list)

+/* __NR_syscall_max must always track the highest number defined above. */
-#define __NR_syscall_max __NR_unshare
+#define __NR_syscall_max __NR_get_robust_list
 
 #ifndef __NO_STUBS