arch_cmpxchg() should check the data size rather than the pointer size
when CONFIG_ARC_HAS_LLSC is defined. So rename __cmpxchg to __cmpxchg_32
to emphasize that it explicitly supports only 32-bit data, and add a
BUILD_BUG_ON() to catch misuse with unsupported data sizes.
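
For illustration only, a minimal sketch (hypothetical helper, not part
of this patch, assuming a 32-bit ARC build) of why the operand size,
not the pointer size, is what needs checking:

  #include <linux/build_bug.h>
  #include <linux/types.h>

  static inline void cmpxchg_size_check_sketch(void)
  {
          u64 v;

          /* Never fires: every pointer is 4 bytes on 32-bit ARC */
          BUILD_BUG_ON(sizeof(&v) != 4);

          /* Would fire at compile time: the operand itself is 8 bytes */
          /* BUILD_BUG_ON(sizeof(v) != 4); */
  }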

When CONFIG_ARC_HAS_LLSC is undefined, arch_cmpxchg() uses a spinlock
to ensure SMP safety, so the BUILD_BUG_ON() check is unnecessary.
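
For reference, a simplified sketch of the spinlock-protected pattern the
!LLSC path relies on (lock and macro names here are made up, not the
ones used in cmpxchg.h): the compare-and-swap is a plain C
load/compare/store made atomic by the lock, so it does not depend on
per-size atomic instruction support.

  #include <linux/spinlock.h>

  static DEFINE_SPINLOCK(sketch_lock);          /* hypothetical lock */

  #define sketch_cmpxchg(ptr, old, new)                                 \
  ({                                                                    \
          __typeof__(*(ptr)) _prev;                                     \
          unsigned long _flags;                                         \
                                                                        \
          /* spin lock/unlock provide the needed smp_mb() before/after */\
          spin_lock_irqsave(&sketch_lock, _flags);                      \
          _prev = *(ptr);                                               \
          if (_prev == (old))                                           \
                  *(ptr) = (new);                                       \
          spin_unlock_irqrestore(&sketch_lock, _flags);                 \
                                                                        \
          _prev;                                                        \
  })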

v2 -> v3:
  - Patches regrouped, with the xtensa improvement included
  - Comments refined to address why these changes are needed

v1 -> v2:
  - Try using native cmpxchg variants if available, as Arnd advised

Signed-off-by: wuqiang.matt <wuqiang.m...@bytedance.com>
Reviewed-by: Masami Hiramatsu (Google) <mhira...@kernel.org>
---
 arch/arc/include/asm/cmpxchg.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h
index e138fde067de..bf46514f6f12 100644
--- a/arch/arc/include/asm/cmpxchg.h
+++ b/arch/arc/include/asm/cmpxchg.h
@@ -18,14 +18,16 @@
  * if (*ptr == @old)
  *      *ptr = @new
  */
-#define __cmpxchg(ptr, old, new)                                       \
+#define __cmpxchg_32(ptr, old, new)                                    \
 ({                                                                     \
        __typeof__(*(ptr)) _prev;                                       \
                                                                        \
+       BUILD_BUG_ON(sizeof(*(ptr)) != 4);                              \
+                                                                       \
        __asm__ __volatile__(                                           \
-       "1:     llock  %0, [%1] \n"                                     \
+       "1:     llock  %0, [%1]         \n"                             \
        "       brne   %0, %2, 2f       \n"                             \
-       "       scond  %3, [%1] \n"                                     \
+       "       scond  %3, [%1]         \n"                             \
        "       bnz     1b              \n"                             \
        "2:                             \n"                             \
        : "=&r"(_prev)  /* Early clobber prevent reg reuse */           \
@@ -47,7 +49,7 @@
                                                                        \
        switch(sizeof((_p_))) {                                         \
        case 4:                                                         \
-               _prev_ = __cmpxchg(_p_, _o_, _n_);                      \
+               _prev_ = __cmpxchg_32(_p_, _o_, _n_);                   \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
@@ -65,8 +67,6 @@
        __typeof__(*(ptr)) _prev_;                                      \
        unsigned long __flags;                                          \
                                                                        \
-       BUILD_BUG_ON(sizeof(_p_) != 4);                                 \
-                                                                       \
        /*                                                              \
         * spin lock/unlock provide the needed smp_mb() before/after    \
         */                                                             \
-- 
2.40.1

