sh: support 1 and 2 byte xchg

The llsc variant only provides 4-byte load-locked/store-conditional
primitives, and the old inline-asm xchg_u8() built on them was broken:
movco.l stores a full 32-bit word, so exchanging one byte zeroed the
three neighbouring bytes and returned the whole old word rather than
just the old byte.

Replace it with a C helper, __xchg_cmpxchg(), which loops on a 4-byte
cmpxchg of the aligned containing word, masking the new value into the
correct byte lane for either endianness and retrying if any other byte
changed in the meantime. Build xchg_u8() and the previously missing
xchg_u16() on top of it, and fix __cmpxchg_u32() to take a
volatile u32 * to match.
diff --git a/arch/sh/include/asm/cmpxchg-llsc.h b/arch/sh/include/asm/cmpxchg-llsc.h
index 4713666..e754794 100644
--- a/arch/sh/include/asm/cmpxchg-llsc.h
+++ b/arch/sh/include/asm/cmpxchg-llsc.h
@@ -1,6 +1,9 @@
 #ifndef __ASM_SH_CMPXCHG_LLSC_H
 #define __ASM_SH_CMPXCHG_LLSC_H
 
+#include <linux/bitops.h>
+#include <asm/byteorder.h>
+
 static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
 {
        unsigned long retval;
@@ -22,29 +25,8 @@ static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
        return retval;
 }
 
-static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
-{
-       unsigned long retval;
-       unsigned long tmp;
-
-       __asm__ __volatile__ (
-               "1:                                     \n\t"
-               "movli.l        @%2, %0 ! xchg_u8       \n\t"
-               "mov            %0, %1                  \n\t"
-               "mov            %3, %0                  \n\t"
-               "movco.l        %0, @%2                 \n\t"
-               "bf             1b                      \n\t"
-               "synco                                  \n\t"
-               : "=&z"(tmp), "=&r" (retval)
-               : "r" (m), "r" (val & 0xff)
-               : "t", "memory"
-       );
-
-       return retval;
-}
-
 static inline unsigned long
-__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
+__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
 {
        unsigned long retval;
        unsigned long tmp;
@@ -68,4 +50,36 @@ __cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
        return retval;
 }
 
+static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
+{
+       int off = (unsigned long)ptr % sizeof(u32);
+       volatile u32 *p = ptr - off;
+#ifdef __BIG_ENDIAN
+       int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
+#else
+       int bitoff = off * BITS_PER_BYTE;
+#endif
+       u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
+       u32 oldv, newv;
+       u32 ret;
+
+       do {
+               oldv = READ_ONCE(*p);
+               ret = (oldv & bitmask) >> bitoff;
+               newv = (oldv & ~bitmask) | (x << bitoff);
+       } while (__cmpxchg_u32(p, oldv, newv) != oldv);
+
+       return ret;
+}
+
+static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
+{
+       return __xchg_cmpxchg(m, val, sizeof(*m));
+}
+
+static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
+{
+       return __xchg_cmpxchg(m, val, sizeof(*m));
+}
+
 #endif /* __ASM_SH_CMPXCHG_LLSC_H */
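
For reference, here is a minimal user-space sketch of the same
technique. The function name xchg_u8_model and the GCC __atomic
builtins (standing in for the kernel's __cmpxchg_u32) are illustration
choices, not part of the patch, and only the little-endian lane math
is shown:

#include <stdint.h>

/*
 * Model of __xchg_cmpxchg() above: emulate a 1-byte xchg with a
 * 4-byte compare-and-swap on the aligned word containing the byte.
 */
static uint8_t xchg_u8_model(uint8_t *m, uint8_t val)
{
	uintptr_t off = (uintptr_t)m % sizeof(uint32_t);
	uint32_t *p = (uint32_t *)((uintptr_t)m - off);
	unsigned int bitoff = off * 8;          /* little-endian lane */
	uint32_t mask = 0xffu << bitoff;
	uint32_t oldv, newv;

	do {
		oldv = __atomic_load_n(p, __ATOMIC_RELAXED);
		newv = (oldv & ~mask) | ((uint32_t)val << bitoff);
		/* Store fails if any byte of the word changed meanwhile. */
	} while (!__atomic_compare_exchange_n(p, &oldv, newv, 0,
					      __ATOMIC_SEQ_CST,
					      __ATOMIC_RELAXED));

	return (oldv & mask) >> bitoff;
}

As in the patch, the compare-and-swap only succeeds when the entire
word is unchanged since the load, so concurrent writes to neighbouring
bytes are never lost; the loop simply retries until it wins the race.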