Mirror of https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git
sparc: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}. These will replace the
atomic_{set,clear}_mask functions that are available on some archs.

Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 658aa51459
commit 304a0d699a
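
The new ops take the same (value, pointer) argument order as the mask helpers they replace. A minimal before/after sketch of a caller being converted -- the flag value and function names here are made up for illustration and are not part of the patch:

#include <linux/atomic.h>

static atomic_t dev_flags = ATOMIC_INIT(0);

/* Old style: arch-specific mask helpers. */
static void set_busy_old(void)
{
	atomic_set_mask(0x1, &dev_flags);	/* dev_flags |= 0x1, atomically */
}

static void clear_busy_old(void)
{
	atomic_clear_mask(0x1, &dev_flags);	/* dev_flags &= ~0x1, atomically */
}

/* New style: the generic atomic logic ops provided by this series. */
static void set_busy_new(void)
{
	atomic_or(0x1, &dev_flags);
}

static void clear_busy_new(void)
{
	atomic_and(~0x1, &dev_flags);
}
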
@@ -17,10 +17,14 @@
 #include <asm/barrier.h>
 #include <asm-generic/atomic64.h>
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
 
 #define ATOMIC_INIT(i)	{ (i) }
 
 int atomic_add_return(int, atomic_t *);
+void atomic_and(int, atomic_t *);
+void atomic_or(int, atomic_t *);
+void atomic_xor(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
 int atomic_xchg(atomic_t *, int);
 int __atomic_add_unless(atomic_t *, int, int);
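
The CONFIG_ARCH_HAS_ATOMIC_OR define added above is a transitional marker: it tells the generic atomic code that sparc now supplies atomic_or() (and friends) itself, so no fallback needs to be generated while other architectures are still being converted. Roughly, the guard on the generic side can be pictured like this -- a sketch only, the exact code in include/linux/atomic.h at this point in the series may differ:

#ifndef CONFIG_ARCH_HAS_ATOMIC_OR
/* Fallback for not-yet-converted architectures, built on the old helper. */
static inline void atomic_or(int i, atomic_t *v)
{
	atomic_set_mask(i, v);
}
#endif
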
@@ -33,6 +33,12 @@ long atomic64_##op##_return(long, atomic64_t *);
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
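
In this 64-bit header the ops are only declared; the bodies live in assembly. Assuming ATOMIC_OP() here follows the same pattern as the return variant visible in the hunk context (one 32-bit and one 64-bit prototype per op), ATOMIC_OP(and) expands to roughly the two prototypes below (a sketch, not verbatim kernel code):

void atomic_and(int, atomic_t *);
void atomic64_and(long, atomic64_t *);
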
@@ -27,22 +27,38 @@ static DEFINE_SPINLOCK(dummy);
 
 #endif /* SMP */
 
-#define ATOMIC_OP(op, cop)					\
+#define ATOMIC_OP_RETURN(op, c_op)				\
 int atomic_##op##_return(int i, atomic_t *v)			\
 {								\
 	int ret;						\
 	unsigned long flags;					\
 	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
 								\
-	ret = (v->counter cop i);				\
+	ret = (v->counter c_op i);				\
 								\
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
 	return ret;						\
 }								\
 EXPORT_SYMBOL(atomic_##op##_return);
 
-ATOMIC_OP(add, +=)
+#define ATOMIC_OP(op, c_op)					\
+void atomic_##op(int i, atomic_t *v)				\
+{								\
+	unsigned long flags;					\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
+								\
+	v->counter c_op i;					\
+								\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
+}								\
+EXPORT_SYMBOL(atomic_##op);
+
+ATOMIC_OP_RETURN(add, +=)
+ATOMIC_OP(and, &=)
+ATOMIC_OP(or, |=)
+ATOMIC_OP(xor, ^=)
 
+#undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
 int atomic_xchg(atomic_t *v, int new)
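
Expanded by hand, the new ATOMIC_OP(or, |=) above produces an irq-safe implementation that serializes on the per-address hash lock:

void atomic_or(int i, atomic_t *v)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter |= i;	/* whole read-modify-write done under the hash lock */
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_or);
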
@@ -47,6 +47,9 @@ ENDPROC(atomic_##op##_return);
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
 
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN

@@ -84,6 +87,9 @@ ENDPROC(atomic64_##op##_return);
 
 ATOMIC64_OPS(add)
 ATOMIC64_OPS(sub)
+ATOMIC64_OP(and)
+ATOMIC64_OP(or)
+ATOMIC64_OP(xor)
 
 #undef ATOMIC64_OPS
 #undef ATOMIC64_OP_RETURN
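
On sparc64 the ATOMIC_OP()/ATOMIC64_OP() templates instantiated above emit compare-and-swap retry loops in assembly. The same technique written in portable C, using the GCC/Clang __atomic builtins purely for illustration (this is not the kernel's code):

/* Shape of the cas loop behind atomic_or() on sparc64: load the old value,
 * compute old | i, try to cas it in, and retry if another CPU raced us. */
static void illustrative_atomic_or(int i, int *counter)
{
	int old = __atomic_load_n(counter, __ATOMIC_RELAXED);

	while (!__atomic_compare_exchange_n(counter, &old, old | i,
					    1 /* weak */,
					    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
		;	/* a failed cas refreshes 'old'; loop and try again */
}
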
@@ -111,6 +111,9 @@ EXPORT_SYMBOL(atomic64_##op##_return);
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
 
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
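
This last hunk only extends the module export list for the assembly implementations. Assuming the ATOMIC_OP() helper in this file mirrors the return variant visible in the hunk context and exports both widths per op, ATOMIC_OP(or) would expand to roughly:

EXPORT_SYMBOL(atomic_or);
EXPORT_SYMBOL(atomic64_or);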