atomic.h

From "Linux kernel source code" · C header file · 802 lines total · page 1 of 2

H
802
字号
 */#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)#ifdef CONFIG_64BITtypedef struct { volatile long counter; } atomic64_t;#define ATOMIC64_INIT(i)    { (i) }/* * atomic64_read - read atomic variable * @v: pointer of type atomic64_t * */#define atomic64_read(v)	((v)->counter)/* * atomic64_set - set atomic variable * @v: pointer of type atomic64_t * @i: required value */#define atomic64_set(v, i)	((v)->counter = (i))/* * atomic64_add - add integer to atomic variable * @i: integer value to add * @v: pointer of type atomic64_t * * Atomically adds @i to @v. */static __inline__ void atomic64_add(long i, atomic64_t * v){	if (cpu_has_llsc && R10000_LLSC_WAR) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%0, %1		# atomic64_add		\n"		"	addu	%0, %2					\n"		"	scd	%0, %1					\n"		"	beqzl	%0, 1b					\n"		"	.set	mips0					\n"		: "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter));	} else if (cpu_has_llsc) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%0, %1		# atomic64_add		\n"		"	addu	%0, %2					\n"		"	scd	%0, %1					\n"		"	beqz	%0, 2f					\n"		"	.subsection 2					\n"		"2:	b	1b					\n"		"	.previous					\n"		"	.set	mips0					\n"		: "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter));	} else {		unsigned long flags;		raw_local_irq_save(flags);		v->counter += i;		raw_local_irq_restore(flags);	}}/* * atomic64_sub - subtract the atomic variable * @i: integer value to subtract * @v: pointer of type atomic64_t * * Atomically subtracts @i from @v. 
*/static __inline__ void atomic64_sub(long i, atomic64_t * v){	if (cpu_has_llsc && R10000_LLSC_WAR) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%0, %1		# atomic64_sub		\n"		"	subu	%0, %2					\n"		"	scd	%0, %1					\n"		"	beqzl	%0, 1b					\n"		"	.set	mips0					\n"		: "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter));	} else if (cpu_has_llsc) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%0, %1		# atomic64_sub		\n"		"	subu	%0, %2					\n"		"	scd	%0, %1					\n"		"	beqz	%0, 2f					\n"		"	.subsection 2					\n"		"2:	b	1b					\n"		"	.previous					\n"		"	.set	mips0					\n"		: "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter));	} else {		unsigned long flags;		raw_local_irq_save(flags);		v->counter -= i;		raw_local_irq_restore(flags);	}}/* * Same as above, but return the result value */static __inline__ long atomic64_add_return(long i, atomic64_t * v){	unsigned long result;	smp_llsc_mb();	if (cpu_has_llsc && R10000_LLSC_WAR) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_add_return	\n"		"	addu	%0, %1, %3				\n"		"	scd	%0, %2					\n"		"	beqzl	%0, 1b					\n"		"	addu	%0, %1, %3				\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else if (cpu_has_llsc) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_add_return	\n"		"	addu	%0, %1, %3				\n"		"	scd	%0, %2					\n"		"	beqz	%0, 2f					\n"		"	addu	%0, %1, %3				\n"		"	.subsection 2					\n"		"2:	b	1b					\n"		"	.previous					\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else {		unsigned long flags;		raw_local_irq_save(flags);		result = v->counter;		result += i;		v->counter = result;		raw_local_irq_restore(flags);	}	smp_llsc_mb();	return result;}static __inline__ long atomic64_sub_return(long i, atomic64_t * 
v){	unsigned long result;	smp_llsc_mb();	if (cpu_has_llsc && R10000_LLSC_WAR) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_sub_return	\n"		"	subu	%0, %1, %3				\n"		"	scd	%0, %2					\n"		"	beqzl	%0, 1b					\n"		"	subu	%0, %1, %3				\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else if (cpu_has_llsc) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_sub_return	\n"		"	subu	%0, %1, %3				\n"		"	scd	%0, %2					\n"		"	beqz	%0, 2f					\n"		"	subu	%0, %1, %3				\n"		"	.subsection 2					\n"		"2:	b	1b					\n"		"	.previous					\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else {		unsigned long flags;		raw_local_irq_save(flags);		result = v->counter;		result -= i;		v->counter = result;		raw_local_irq_restore(flags);	}	smp_llsc_mb();	return result;}/* * atomic64_sub_if_positive - conditionally subtract integer from atomic variable * @i: integer value to subtract * @v: pointer of type atomic64_t * * Atomically test @v and subtract @i if @v is greater or equal than @i. * The function returns the old value of @v minus @i. 
*/static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v){	unsigned long result;	smp_llsc_mb();	if (cpu_has_llsc && R10000_LLSC_WAR) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"		"	dsubu	%0, %1, %3				\n"		"	bltz	%0, 1f					\n"		"	scd	%0, %2					\n"		"	.set	noreorder				\n"		"	beqzl	%0, 1b					\n"		"	 dsubu	%0, %1, %3				\n"		"	.set	reorder					\n"		"1:							\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else if (cpu_has_llsc) {		unsigned long temp;		__asm__ __volatile__(		"	.set	mips3					\n"		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"		"	dsubu	%0, %1, %3				\n"		"	bltz	%0, 1f					\n"		"	scd	%0, %2					\n"		"	.set	noreorder				\n"		"	beqz	%0, 2f					\n"		"	 dsubu	%0, %1, %3				\n"		"	.set	reorder					\n"		"1:							\n"		"	.subsection 2					\n"		"2:	b	1b					\n"		"	.previous					\n"		"	.set	mips0					\n"		: "=&r" (result), "=&r" (temp), "=m" (v->counter)		: "Ir" (i), "m" (v->counter)		: "memory");	} else {		unsigned long flags;		raw_local_irq_save(flags);		result = v->counter;		result -= i;		if (result >= 0)			v->counter = result;		raw_local_irq_restore(flags);	}	smp_llsc_mb();	return result;}#define atomic64_cmpxchg(v, o, n) \	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))/** * atomic64_add_unless - add unless the number is a given value * @v: pointer of type atomic64_t * @a: the amount to add to v... * @u: ...unless v is equal to u. * * Atomically adds @a to @v, so long as it was not @u. * Returns non-zero if @v was not @u, and zero otherwise. 
*/static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u){	long c, old;	c = atomic64_read(v);	for (;;) {		if (unlikely(c == (u)))			break;		old = atomic64_cmpxchg((v), c, c + (a));		if (likely(old == c))			break;		c = old;	}	return c != (u);}#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)#define atomic64_dec_return(v) atomic64_sub_return(1, (v))#define atomic64_inc_return(v) atomic64_add_return(1, (v))/* * atomic64_sub_and_test - subtract value from variable and test result * @i: integer value to subtract * @v: pointer of type atomic64_t * * Atomically subtracts @i from @v and returns * true if the result is zero, or false for all * other cases. */#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)/* * atomic64_inc_and_test - increment and test * @v: pointer of type atomic64_t * * Atomically increments @v by 1 * and returns true if the result is zero, or false for all * other cases. */#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)/* * atomic64_dec_and_test - decrement by 1 and test * @v: pointer of type atomic64_t * * Atomically decrements @v by 1 and * returns true if the result is 0, or false for all other * cases. */#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)/* * atomic64_dec_if_positive - decrement by 1 if old value positive * @v: pointer of type atomic64_t */#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)/* * atomic64_inc - increment atomic variable * @v: pointer of type atomic64_t * * Atomically increments @v by 1. */#define atomic64_inc(v) atomic64_add(1, (v))/* * atomic64_dec - decrement and test * @v: pointer of type atomic64_t * * Atomically decrements @v by 1. 
*/#define atomic64_dec(v) atomic64_sub(1, (v))/* * atomic64_add_negative - add and test if negative * @v: pointer of type atomic64_t * @i: integer value to add * * Atomically adds @i to @v and returns true * if the result is negative, or false when * result is greater than or equal to zero. */#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)#endif /* CONFIG_64BIT *//* * atomic*_return operations are serializing but not the non-*_return * versions. */#define smp_mb__before_atomic_dec()	smp_llsc_mb()#define smp_mb__after_atomic_dec()	smp_llsc_mb()#define smp_mb__before_atomic_inc()	smp_llsc_mb()#define smp_mb__after_atomic_inc()	smp_llsc_mb()#include <asm-generic/atomic.h>#endif /* _ASM_ATOMIC_H */

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?