author		Arun Sharma <asharma@fb.com>			2011-07-26 16:09:07 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2011-07-26 16:49:47 -0700
commit		f24219b4e90cf70ec4a211b17fbabc725a0ddf3c (patch)
tree		c1c753bd425d61a5094995d9835b23b46383d9b2 /arch/tile
parent		60063497a95e716c9a689af3be2687d261f115b4 (diff)
atomic: move atomic_add_unless to generic code
This is in preparation for more generic atomic primitives based on
__atomic_add_unless.

Signed-off-by: Arun Sharma <asharma@fb.com>
Signed-off-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Reviewed-by: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'arch/tile')
-rw-r--r--	arch/tile/include/asm/atomic_32.h	10
-rw-r--r--	arch/tile/include/asm/atomic_64.h	4
2 files changed, 7 insertions(+), 7 deletions(-)
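For orientation, the generic layer this change prepares for can be pictured roughly as below. This is a minimal sketch assuming the old-value return convention introduced in the hunks that follow; it is not the exact code that later lands in include/linux/atomic.h.

/* Sketch only: a generic atomic_add_unless() built on top of the
 * architecture-provided __atomic_add_unless().  Because the helper
 * returns the old value of @v, the "did the add happen?" test
 * reduces to a comparison against @u.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}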
diff --git a/arch/tile/include/asm/atomic_32.h b/arch/tile/include/asm/atomic_32.h
index 246feed4794..c03349e0ca9 100644
--- a/arch/tile/include/asm/atomic_32.h
+++ b/arch/tile/include/asm/atomic_32.h
@@ -81,18 +81,18 @@ static inline int atomic_add_return(int i, atomic_t *v)
}
/**
- * atomic_add_unless - add unless the number is already a given value
+ * __atomic_add_unless - add unless the number is already a given value
* @v: pointer of type atomic_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
smp_mb(); /* barrier for proper semantics */
- return _atomic_xchg_add_unless(v, a, u) != u;
+ return _atomic_xchg_add_unless(v, a, u);
}
/**
@@ -199,7 +199,7 @@ static inline u64 atomic64_add_return(u64 i, atomic64_t *v)
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
static inline u64 atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
{
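Callers do not see the old-value convention directly; boolean-style helpers can be layered on top generically. Something along these lines, shown for illustration only and not part of this diff:

/* Illustrative sketch: with atomic_add_unless() defined generically,
 * common helpers such as atomic_inc_not_zero() become one-liners.
 */
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)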
diff --git a/arch/tile/include/asm/atomic_64.h b/arch/tile/include/asm/atomic_64.h
index a48dda30cbc..27fe667fddf 100644
--- a/arch/tile/include/asm/atomic_64.h
+++ b/arch/tile/include/asm/atomic_64.h
@@ -64,7 +64,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
return val;
}
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
int guess, oldval = v->counter;
do {
@@ -73,7 +73,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
guess = oldval;
oldval = atomic_cmpxchg(v, guess, guess + a);
} while (guess != oldval);
- return oldval != u;
+ return oldval;
}
/* Now the true 64-bit operations. */
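To see the cmpxchg loop above outside of kernel context, here is a userspace analogue using C11 atomics. The function name and the refs counter are made up for illustration; the loop structure mirrors the one in atomic_64.h.

/* Userspace sketch (C11 atomics): add @a to *v unless *v == @u,
 * returning the old value, as in the atomic_64.h loop above.
 */
#include <stdatomic.h>
#include <stdio.h>

static int add_unless_return_old(atomic_int *v, int a, int u)
{
	int oldval = atomic_load(v);

	/* Retry until the compare-and-swap succeeds or we observe @u;
	 * a failed CAS reloads oldval with the current value.
	 */
	while (oldval != u &&
	       !atomic_compare_exchange_weak(v, &oldval, oldval + a))
		;

	return oldval;	/* old value; caller tests "!= u" */
}

int main(void)
{
	atomic_int refs = 1;

	/* Take a reference only if the count is not already zero. */
	if (add_unless_return_old(&refs, 1, 0) != 0)
		printf("reference taken, count now %d\n", atomic_load(&refs));
	return 0;
}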