Merge remote-tracking branches 'regulator/fix/88pm800', 'regulator/fix/max8973',...
[linux-drm-fsl-dcu.git] / arch / arc / include / asm / atomic.h
index 9917a45fc430d042a4f59006abf84ceedad1bca7..03484cb4d16d2eb4fada0095ee427726c23bd2e1 100644 (file)
 
 #define atomic_set(v, i) (((v)->counter) = (i))
 
+#ifdef CONFIG_ISA_ARCV2
+#define PREFETCHW      "       prefetchw   [%1]        \n"
+#else
+#define PREFETCHW
+#endif
+
 #define ATOMIC_OP(op, c_op, asm_op)                                    \
 static inline void atomic_##op(int i, atomic_t *v)                     \
 {                                                                      \
        unsigned int temp;                                              \
                                                                        \
        __asm__ __volatile__(                                           \
-       "1:     llock   %0, [%1]        \n"                             \
+       "1:                             \n"                             \
+       PREFETCHW                                                       \
+       "       llock   %0, [%1]        \n"                             \
        "       " #asm_op " %0, %0, %2  \n"                             \
        "       scond   %0, [%1]        \n"                             \
        "       bnz     1b              \n"                             \
@@ -43,8 +51,16 @@ static inline int atomic_##op##_return(int i, atomic_t *v)           \
 {                                                                      \
        unsigned int temp;                                              \
                                                                        \
+       /*                                                              \
+        * Explicit full memory barrier needed before/after as          \
+        * LLOCK/SCOND themselves don't provide any such semantics      \
+        */                                                             \
+       smp_mb();                                                       \
+                                                                       \
        __asm__ __volatile__(                                           \
-       "1:     llock   %0, [%1]        \n"                             \
+       "1:                             \n"                             \
+       PREFETCHW                                                       \
+       "       llock   %0, [%1]        \n"                             \
        "       " #asm_op " %0, %0, %2  \n"                             \
        "       scond   %0, [%1]        \n"                             \
        "       bnz     1b              \n"                             \
@@ -52,6 +68,8 @@ static inline int atomic_##op##_return(int i, atomic_t *v)            \
        : "r"(&v->counter), "ir"(i)                                     \
        : "cc");                                                        \
                                                                        \
+       smp_mb();                                                       \
+                                                                       \
        return temp;                                                    \
 }
 
@@ -105,6 +123,9 @@ static inline int atomic_##op##_return(int i, atomic_t *v)          \
        unsigned long flags;                                            \
        unsigned long temp;                                             \
                                                                        \
+       /*                                                              \
+        * spin lock/unlock provides the needed smp_mb() before/after   \
+        */                                                             \
        atomic_ops_lock(flags);                                         \
        temp = v->counter;                                              \
        temp c_op i;                                                    \
@@ -142,9 +163,19 @@ ATOMIC_OP(and, &=, and)
 #define __atomic_add_unless(v, a, u)                                   \
 ({                                                                     \
        int c, old;                                                     \
+                                                                       \
+       /*                                                              \
+        * Explicit full memory barrier needed before/after as          \
+        * LLOCK/SCOND themselves don't provide any such semantics      \
+        */                                                             \
+       smp_mb();                                                       \
+                                                                       \
        c = atomic_read(v);                                             \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c)\
                c = old;                                                \
+                                                                       \
+       smp_mb();                                                       \
+                                                                       \
        c;                                                              \
 })