⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 system.h

📁 linux 内核源代码
💻 H
📖 第 1 页 / 共 2 页
字号:
}	/* closes a function that begins before this chunk (not visible here) */

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

/*
 * Size dispatcher for atomic exchange: routes to the 1/2/4/8-byte
 * helper.  The default arm references the undefined extern above so a
 * bad size fails at link time; assigning __xchg__res = x merely keeps
 * the statement expression well-formed for the compiler.
 */
#define __xchg(ptr, x, size) \
({ \
	unsigned long __xchg__res; \
	volatile void *__xchg__ptr = (ptr); \
	switch (size) { \
		case 1: __xchg__res = __xchg_u8(__xchg__ptr, x); break; \
		case 2: __xchg__res = __xchg_u16(__xchg__ptr, x); break; \
		case 4: __xchg__res = __xchg_u32(__xchg__ptr, x); break; \
		case 8: __xchg__res = __xchg_u64(__xchg__ptr, x); break; \
		default: __xchg_called_with_bad_pointer(); __xchg__res = x; \
	} \
	__xchg__res; \
})

/*
 * Public xchg(): atomically store x into *ptr and return the previous
 * value, cast back to the pointee's type.  sizeof(*(ptr)) picks the
 * helper at compile time.
 */
#define xchg(ptr,x)							     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
  })

/*
 * Byte exchange, "_local" flavor: same ll/sc sequence as the plain
 * version but with no memory barrier emitted (contrast the
 * CONFIG_SMP "mb" in __cmpxchg_u8 below) — presumably intended for
 * CPU-local data where SMP ordering is not needed; confirm at callers.
 *
 * Alpha has no byte-granular ll/sc, so the code operates on the
 * containing aligned quadword:
 *   andnot %4,7   -> addr64 = quadword-aligned address of m
 *   insbl/mskbl   -> position the new byte / clear the old byte
 *   ldq_l/stq_c   -> load-locked / store-conditional; stq_c writes 0
 *                    into %2 on failure, and "beq %2,2f" then branches
 *                    to the out-of-line retry stub that loops to 1:.
 * Returns the previous byte value (extbl result in %0/ret).
 */
static inline unsigned long
__xchg_u8_local(volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

/*
 * 16-bit exchange, "_local" flavor.  Identical structure to
 * __xchg_u8_local but uses the word-sized insert/extract/mask
 * instructions (inswl/extwl/mskwl).  Returns the previous 16-bit value.
 */
static inline unsigned long
__xchg_u16_local(volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

/*
 * 32-bit exchange, "_local" flavor.  Longwords have native ll/sc
 * (ldl_l/stl_c), so no insert/mask dance is needed: load-locked the
 * old value into %0, copy the new value into %1 (bis $31,%3,%1 is a
 * register move), store-conditional, and retry via the out-of-line
 * stub if stl_c fails.  Returns the previous 32-bit value.
 */
static inline unsigned long
__xchg_u32_local(volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/*
 * 64-bit exchange, "_local" flavor.  Same pattern as the 32-bit
 * version with quadword ll/sc (ldq_l/stq_c).  Returns the previous
 * 64-bit value.
 */
static inline unsigned long
__xchg_u64_local(volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* Size dispatcher for the barrier-free local exchange helpers;
   mirrors __xchg above, including the link-time bad-size trap. */
#define __xchg_local(ptr, x, size) \
({ \
	unsigned long __xchg__res; \
	volatile void *__xchg__ptr = (ptr); \
	switch (size) { \
		case 1: __xchg__res = __xchg_u8_local(__xchg__ptr, x); break; \
		case 2: __xchg__res = __xchg_u16_local(__xchg__ptr, x); break; \
		case 4: __xchg__res = __xchg_u32_local(__xchg__ptr, x); break; \
		case 8: __xchg__res = __xchg_u64_local(__xchg__ptr, x); break; \
		default: __xchg_called_with_bad_pointer(); __xchg__res = x; \
	} \
	__xchg__res; \
})

/* xchg() without SMP ordering; see xchg above for the calling shape. */
#define xchg_local(ptr,x)						     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_,	     \
     		sizeof(*(ptr))); \
  })

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change. If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far I can tell.
 */
#define __HAVE_ARCH_CMPXCHG 1

/*
 * Byte cmpxchg via quadword ll/sc (see __xchg_u8_local for the
 * insbl/extbl/mskbl addressing scheme).  After extracting the current
 * byte into %0, "cmpeq %0,%6,%3 / beq %3,2f" bails out early — without
 * storing and without the barrier — when it differs from old, per the
 * comment block above.  On a successful stq_c an "mb" is emitted under
 * CONFIG_SMP; a failed stq_c branches to the out-of-line retry stub.
 * Returns the previous byte value.
 */
static inline unsigned long
__cmpxchg_u8(volatile char *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
#ifdef CONFIG_SMP
	"	mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/*
 * 16-bit cmpxchg; identical to __cmpxchg_u8 with word-sized
 * insert/extract/mask instructions.  Returns the previous value.
 */
static inline unsigned long
__cmpxchg_u16(volatile short *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
#ifdef CONFIG_SMP
	"	mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/*
 * 32-bit cmpxchg with native longword ll/sc: load-locked, compare
 * against old (early-out to 2: on mismatch, skipping store and
 * barrier), store-conditional the new value, retry out of line on
 * stq_c/stl_c failure, "mb" under CONFIG_SMP on success.
 * Returns the previous value.
 */
static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
#ifdef CONFIG_SMP
	"	mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/*
 * 64-bit cmpxchg; same pattern as __cmpxchg_u32 with quadword
 * ldq_l/stq_c.  Returns the previous value.
 */
static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
#ifdef CONFIG_SMP
	"	mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

/*
 * Size dispatcher for cmpxchg.  __always_inline guarantees the switch
 * folds away for the constant sizeof passed by the cmpxchg() macro;
 * an unsupported size falls through to the undefined extern and fails
 * at link time, with "return old" keeping the compiler happy.
 */
static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
		case 1:
			return __cmpxchg_u8(ptr, old, new);
		case 2:
			return __cmpxchg_u16(ptr, old, new);
		case 4:
			return __cmpxchg_u32(ptr, old, new);
		case 8:
			return __cmpxchg_u64(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

/*
 * Public cmpxchg(): if *ptr == o, store n; returns the value found in
 * *ptr (== o on success), cast back to the pointee's type.
 */
#define cmpxchg(ptr,o,n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })

/*
 * Byte cmpxchg, "_local" flavor: byte-for-byte the same sequence as
 * __cmpxchg_u8 minus the CONFIG_SMP "mb" — no ordering is provided.
 */
static inline unsigned long
__cmpxchg_u8_local(volatile char *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/* 16-bit cmpxchg, "_local" flavor (no barrier); cf. __cmpxchg_u16. */
static inline unsigned long
__cmpxchg_u16_local(volatile short *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/* 32-bit cmpxchg, "_local" flavor (no barrier); cf. __cmpxchg_u32. */
static inline unsigned long
__cmpxchg_u32_local(volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* 64-bit cmpxchg, "_local" flavor (no barrier); cf. __cmpxchg_u64. */
static inline unsigned long
__cmpxchg_u64_local(volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* Size dispatcher for the barrier-free local cmpxchg helpers;
   mirrors __cmpxchg above, including the link-time bad-size trap. */
static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
		int size)
{
	switch (size) {
		case 1:
			return __cmpxchg_u8_local(ptr, old, new);
		case 2:
			return __cmpxchg_u16_local(ptr, old, new);
		case 4:
			return __cmpxchg_u32_local(ptr, old, new);
		case 8:
			return __cmpxchg_u64_local(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

/* cmpxchg() without SMP ordering; same calling shape as cmpxchg. */
#define cmpxchg_local(ptr,o,n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#endif /* __ASSEMBLY__ */

/* No stack randomization on this architecture: identity mapping. */
#define arch_align_stack(x) (x)

#endif	/* closes the header's include guard (opened before this chunk) */

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -