*/
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-       unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
+       if (!kernel_uses_llsc) {
+               __mips_set_bit(nr, addr);
+               return;
+       }
+
+       if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)
                : __LLSC_CLOBBER);
+               return;
+       }
+
 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-       } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
+       if (__builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
+               return;
+       }
 #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-       } else if (kernel_uses_llsc) {
-               loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL "%0, %1          # set_bit       \n"
-                       "       or      %0, %2                          \n"
-                       "       " __SC  "%0, %1                         \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                       : "ir" (1UL << bit)
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
-       } else
-               __mips_set_bit(nr, addr);
+
+       loongson_llsc_mb();
+       do {
+               __asm__ __volatile__(
+               "       .set    push                            \n"
+               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
+               "       " __LL "%0, %1          # set_bit       \n"
+               "       or      %0, %2                          \n"
+               "       " __SC  "%0, %1                         \n"
+               "       .set    pop                             \n"
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
+               : "ir" (1UL << bit)
+               : __LLSC_CLOBBER);
+       } while (unlikely(!temp));
 }
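(Aside, not part of the patch: every path above still implements the same atomic read-modify-write OR; only the instruction sequence differs. A minimal userspace sketch of those semantics, assuming GCC/Clang __atomic builtins and a hypothetical sketch_set_bit() helper; note that plain set_bit() implies no memory barrier:)

#include <limits.h>     /* CHAR_BIT */

#define SKETCH_BITS_PER_LONG    (CHAR_BIT * sizeof(unsigned long))

/* Sketch of set_bit() semantics: atomic OR into the word holding bit nr. */
static inline void sketch_set_bit(unsigned long nr, unsigned long *addr)
{
        unsigned long *m = addr + (nr / SKETCH_BITS_PER_LONG);
        unsigned long mask = 1UL << (nr % SKETCH_BITS_PER_LONG);

        /* Relaxed ordering: the plain bit ops imply no barrier. */
        __atomic_fetch_or(m, mask, __ATOMIC_RELAXED);
}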
 
 /*
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-       unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
+       if (!kernel_uses_llsc) {
+               __mips_clear_bit(nr, addr);
+               return;
+       }
+
+       if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit))
                : __LLSC_CLOBBER);
+               return;
+       }
+
 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-       } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
+       if (__builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
+               return;
+       }
 #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-       } else if (kernel_uses_llsc) {
-               loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL "%0, %1          # clear_bit     \n"
-                       "       and     %0, %2                          \n"
-                       "       " __SC "%0, %1                          \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                       : "ir" (~(1UL << bit))
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
-       } else
-               __mips_clear_bit(nr, addr);
+
+       loongson_llsc_mb();
+       do {
+               __asm__ __volatile__(
+               "       .set    push                            \n"
+               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
+               "       " __LL "%0, %1          # clear_bit     \n"
+               "       and     %0, %2                          \n"
+               "       " __SC "%0, %1                          \n"
+               "       .set    pop                             \n"
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
+               : "ir" (~(1UL << bit))
+               : __LLSC_CLOBBER);
+       } while (unlikely(!temp));
 }
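(Aside: the CONFIG_CPU_MIPSR2/CONFIG_CPU_MIPSR6 branches above use the ins instruction when the bit number is a compile-time constant: set_bit() inserts a 1-bit field taken from ~0, while clear_bit() inserts from the always-zero register $0, so neither needs a precomputed mask. A plain-C model of one such insert, using a hypothetical sketch_ins1() name:)

/* Model of "ins %dst, %src, pos, 1": replace the 1-bit field of dst at
 * position pos with bit 0 of src. */
static inline unsigned long sketch_ins1(unsigned long dst, unsigned long src,
                                        unsigned int pos)
{
        return (dst & ~(1UL << pos)) | ((src & 1UL) << pos);
}

/* set_bit path:   word = sketch_ins1(word, ~0UL, bit);  bit becomes 1 */
/* clear_bit path: word = sketch_ins1(word, 0UL, bit);   bit becomes 0 */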
 
 /*
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
+       unsigned long temp;
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
+       if (!kernel_uses_llsc) {
+               __mips_change_bit(nr, addr);
+               return;
+       }
 
+       if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit)
                : __LLSC_CLOBBER);
-       } else if (kernel_uses_llsc) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
+               return;
+       }
 
-               loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL "%0, %1          # change_bit    \n"
-                       "       xor     %0, %2                          \n"
-                       "       " __SC  "%0, %1                         \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                       : "ir" (1UL << bit)
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
-       } else
-               __mips_change_bit(nr, addr);
+       loongson_llsc_mb();
+       do {
+               __asm__ __volatile__(
+               "       .set    push                            \n"
+               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
+               "       " __LL "%0, %1          # change_bit    \n"
+               "       xor     %0, %2                          \n"
+               "       " __SC  "%0, %1                         \n"
+               "       .set    pop                             \n"
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
+               : "ir" (1UL << bit)
+               : __LLSC_CLOBBER);
+       } while (unlikely(!temp));
 }
 
 /*
 static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
 {
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res;
+       unsigned long res, temp;
 
        smp_mb__before_llsc();
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       if (!kernel_uses_llsc) {
+               res = __mips_test_and_set_bit(nr, addr);
+       } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
-       } else if (kernel_uses_llsc) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));
 
                res = temp & (1UL << bit);
-       } else
-               res = __mips_test_and_set_bit(nr, addr);
+       }
 
        smp_llsc_mb();
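(Aside: unlike the plain bit ops, test_and_set_bit() is bracketed by smp_mb__before_llsc() and smp_llsc_mb(), so it acts as a fully ordered read-modify-write that returns the bit's previous value. A rough userspace equivalent, reusing SKETCH_BITS_PER_LONG from the earlier sketch, with sequential consistency standing in for the kernel's full barriers:)

static inline int sketch_test_and_set_bit(unsigned long nr, unsigned long *addr)
{
        unsigned long *m = addr + (nr / SKETCH_BITS_PER_LONG);
        unsigned long mask = 1UL << (nr % SKETCH_BITS_PER_LONG);
        /* Atomically fetch the old word and set the bit. */
        unsigned long old = __atomic_fetch_or(m, mask, __ATOMIC_SEQ_CST);

        return (old & mask) != 0;
}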
 
 static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
 {
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res;
-
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
+       unsigned long res, temp;
 
+       if (!kernel_uses_llsc) {
+               res = __mips_test_and_set_bit_lock(nr, addr);
+       } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+m" (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
-       } else if (kernel_uses_llsc) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));
 
                res = temp & (1UL << bit);
-       } else
-               res = __mips_test_and_set_bit_lock(nr, addr);
+       }
 
        smp_llsc_mb();
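(Aside: test_and_set_bit_lock() deliberately omits the leading smp_mb__before_llsc(); only the trailing smp_llsc_mb() remains, giving acquire-style ordering, which is all that taking a lock requires. A sketch of a trylock built on those semantics, with a hypothetical sketch_trylock() name:)

/* Returns 1 if the lock was taken, i.e. bit 0 was previously clear. */
static inline int sketch_trylock(unsigned long *lock)
{
        unsigned long old = __atomic_fetch_or(lock, 1UL, __ATOMIC_ACQUIRE);

        return (old & 1UL) == 0;
}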
 
 static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
 {
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res;
+       unsigned long res, temp;
 
        smp_mb__before_llsc();
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       if (!kernel_uses_llsc) {
+               res = __mips_test_and_clear_bit(nr, addr);
+       } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-       } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       } else if (__builtin_constant_p(nr)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
 #endif
-       } else if (kernel_uses_llsc) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));
 
                res = temp & (1UL << bit);
-       } else
-               res = __mips_test_and_clear_bit(nr, addr);
+       }
 
        smp_llsc_mb();
 
 static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
 {
+       unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res;
+       unsigned long res, temp;
 
        smp_mb__before_llsc();
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       if (!kernel_uses_llsc) {
+               res = __mips_test_and_change_bit(nr, addr);
+       } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
-       } else if (kernel_uses_llsc) {
-               unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-               unsigned long temp;
-
+       } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));
 
                res = temp & (1UL << bit);
-       } else
-               res = __mips_test_and_change_bit(nr, addr);
+       }
 
        smp_llsc_mb();