Add atomic_add(), atomic64_add() and the atomic64_t type to the tools
headers. Use the arch/x86/ kernel code for x86_64/i386 and fall back to
a gcc intrinsics (__sync_add_and_fetch) implementation on other
architectures.
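
For illustration only (not part of this patch), and assuming the
existing tools/include/linux/atomic.h wrapper that selects the per-arch
or the gcc-intrinsics implementation, a user of these helpers could
look like the sketch below (the counter and function names are made up
for the example):

    #include <linux/atomic.h>

    static atomic64_t total_lost;   /* shared counter, hypothetical */

    /* account lost samples from any thread without taking a lock */
    static void account_lost(long nr)
    {
            atomic64_add(nr, &total_lost);
    }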

Signed-off-by: He Kuang <heku...@huawei.com>
Signed-off-by: Wang Nan <wangn...@huawei.com>
---
 tools/arch/x86/include/asm/atomic.h    | 28 ++++++++++++++++++++++++++++
 tools/include/asm-generic/atomic-gcc.h | 10 ++++++++++
 tools/include/linux/types.h            |  4 ++++
 3 files changed, 42 insertions(+)

diff --git a/tools/arch/x86/include/asm/atomic.h b/tools/arch/x86/include/asm/atomic.h
index 059e33e..41f814e 100644
--- a/tools/arch/x86/include/asm/atomic.h
+++ b/tools/arch/x86/include/asm/atomic.h
@@ -62,4 +62,32 @@ static inline int atomic_dec_and_test(atomic_t *v)
        GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
 }
 
+/**
+ * atomic_add - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v.
+ */
+static __always_inline void atomic_add(int i, atomic_t *v)
+{
+       asm volatile(LOCK_PREFIX "addl %1,%0"
+                    : "+m" (v->counter)
+                    : "ir" (i));
+}
+
+/**
+ * atomic64_add - add integer to atomic64 variable
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically adds @i to @v.
+ */
+static __always_inline void atomic64_add(long i, atomic64_t *v)
+{
+       asm volatile(LOCK_PREFIX "addq %1,%0"
+                    : "=m" (v->counter)
+                    : "er" (i), "m" (v->counter));
+}
+
 #endif /* _TOOLS_LINUX_ASM_X86_ATOMIC_H */
diff --git a/tools/include/asm-generic/atomic-gcc.h b/tools/include/asm-generic/atomic-gcc.h
index 2ba78c9..b615907 100644
--- a/tools/include/asm-generic/atomic-gcc.h
+++ b/tools/include/asm-generic/atomic-gcc.h
@@ -60,4 +60,14 @@ static inline int atomic_dec_and_test(atomic_t *v)
        return __sync_sub_and_fetch(&v->counter, 1) == 0;
 }
 
+static inline void atomic_add(int i, atomic_t *v)
+{
+       __sync_add_and_fetch(&v->counter, i);
+}
+
+static inline void atomic64_add(long i, atomic64_t *v)
+{
+       __sync_add_and_fetch(&v->counter, i);
+}
+
 #endif /* __TOOLS_ASM_GENERIC_ATOMIC_H */
diff --git a/tools/include/linux/types.h b/tools/include/linux/types.h
index 8ebf627..09b325f 100644
--- a/tools/include/linux/types.h
+++ b/tools/include/linux/types.h
@@ -64,6 +64,10 @@ typedef struct {
        int counter;
 } atomic_t;
 
+typedef struct {
+       long counter;
+} atomic64_t;
+
 #ifndef __aligned_u64
 # define __aligned_u64 __u64 __attribute__((aligned(8)))
 #endif
-- 
1.8.5.2