/*
 * system.h — excerpt from "Linux Kernel 2.6.9" (Alpha architecture
 * system header; metadata said OMAP1710, but the code below is Alpha).
 * C header, 625 lines total; this is page 1 of 2.
 */
/*
 * IRQ control.  These are built on the Alpha PALcode IPL (interrupt
 * priority level) primitives: local_irq_save() raises the IPL to
 * IPL_MAX (all interrupts masked) and hands the previous level back
 * in `flags`; local_irq_restore() reinstalls that saved level.  The
 * barrier() calls stop the compiler from moving memory accesses
 * across the disable/enable points.
 * NOTE(review): swpipl/setipl/getipl are defined earlier in this
 * file, outside this excerpt — confirm their exact semantics there.
 */
#define local_irq_save(flags) do { (flags) = swpipl(IPL_MAX); barrier(); } while(0)
#define local_irq_restore(flags) do { barrier(); setipl(flags); barrier(); } while(0)

/* Interrupts are disabled iff the IPL is pinned at its maximum. */
#define irqs_disabled() (getipl() == IPL_MAX)

/*
 * TB routines..
 *
 * __tbi() issues the PAL_tbi PALcall (translation-buffer invalidate).
 * The sub-operation selector is passed in hardware register $16 and an
 * optional virtual address in $17; the `arg`/`arg1` parameters let the
 * wrapper macros below bind that second operand.  The PALcode clobbers
 * $0, $1 and $22-$25, hence the clobber list on the asm.
 */
#define __tbi(nr,arg,arg1...) \
({ \
	register unsigned long __r16 __asm__("$16") = (nr); \
	register unsigned long __r17 __asm__("$17"); arg; \
	__asm__ __volatile__( \
		"call_pal %3 #__tbi" \
		:"=r" (__r16),"=r" (__r17) \
		:"0" (__r16),"i" (PAL_tbi) ,##arg1 \
		:"$0", "$1", "$22", "$23", "$24", "$25"); \
})

/*
 * Convenience wrappers selecting the individual TBI sub-operations
 * (positive codes take a virtual address in $17; -1/-2 take none).
 * NOTE(review): the meaning of codes 1/2/3/-1/-2 follows the Alpha
 * PALcode TBI encoding — confirm against the PALcode reference.
 */
#define tbi(x,y)	__tbi(x,__r17=(y),"1" (__r17))
#define tbisi(x)	__tbi(1,__r17=(x),"1" (__r17))
#define tbisd(x)	__tbi(2,__r17=(x),"1" (__r17))
#define tbis(x)		__tbi(3,__r17=(x),"1" (__r17))
#define tbiap()		__tbi(-1, /* no second argument */)
#define tbia()		__tbi(-2, /* no second argument */)

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

/*
 * 8-bit atomic exchange.  Alpha has no byte-wide LL/SC, so this works
 * on the aligned quadword containing *m:
 *   - andnot masks the low 3 address bits to get the quadword address;
 *   - insbl positions the new byte within a quadword-wide mask;
 *   - ldq_l load-locks the quadword, extbl extracts the old byte into
 *     the return value, mskbl/or splice the new byte in;
 *   - stq_c store-conditionals the result; on failure (%2 == 0) the
 *     beq branches to the out-of-line retry stub in .subsection 2,
 *     which loops back to 1:.
 * On SMP an "mb" on the success path orders the exchange before any
 * subsequent memory accesses.  Returns the previous byte value.
 */
static inline unsigned long
__xchg_u8(volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	" andnot %4,7,%3\n"
	" insbl %1,%4,%1\n"
	"1: ldq_l %2,0(%3)\n"
	" extbl %2,%4,%0\n"
	" mskbl %2,%4,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%3)\n"
	" beq %2,2f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

/*
 * 16-bit atomic exchange.  Same quadword-splicing scheme as
 * __xchg_u8, using the word-wide insert/extract/mask forms
 * (inswl/extwl/mskwl).  Returns the previous 16-bit value.
 */
static inline unsigned long
__xchg_u16(volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	" andnot %4,7,%3\n"
	" inswl %1,%4,%1\n"
	"1: ldq_l %2,0(%3)\n"
	" extwl %2,%4,%0\n"
	" mskwl %2,%4,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%3)\n"
	" beq %2,2f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

/*
 * 32-bit atomic exchange, using the native longword LL/SC pair
 * (ldl_l/stl_c).  "bis $31,%3,%1" copies the new value into the
 * scratch register ($31 is the Alpha zero register); the old value is
 * left in `val` (operand %0) and returned.  Failed store-conditionals
 * retry via the out-of-line stub, as above.
 */
static inline unsigned long
__xchg_u32(volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1: ldl_l %0,%4\n"
	" bis $31,%3,%1\n"
	" stl_c %1,%2\n"
	" beq %1,2f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/*
 * 64-bit atomic exchange — identical to __xchg_u32 but with the
 * quadword LL/SC pair (ldq_l/stq_c).
 */
static inline unsigned long
__xchg_u64(volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1: ldq_l %0,%4\n"
	" bis $31,%3,%1\n"
	" stq_c %1,%2\n"
	" beq %1,2f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

/*
 * Size dispatcher for xchg().  `size` is sizeof(*ptr), computed at
 * compile time by the xchg() macro below, so for valid sizes the
 * switch folds to a single inlined call and the error-function call
 * is dead code the compiler eliminates.
 */
static inline unsigned long
__xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
		case 1:
			return __xchg_u8(ptr, x);
		case 2:
			return __xchg_u16(ptr, x);
		case 4:
			return __xchg_u32(ptr, x);
		case 8:
			return __xchg_u64(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

/*
 * Type-preserving front end: evaluates `x` once, dispatches on the
 * pointee size, and casts the old value back to the pointee type.
 */
#define xchg(ptr,x) \
  ({ \
     __typeof__(*(ptr)) _x_ = (x); \
     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
  })

/* test-and-set: atomically store 1, return the previous value. */
#define tas(ptr) (xchg((ptr),1))

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change. If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far I can tell.
 */

#define __HAVE_ARCH_CMPXCHG 1

/*
 * 8-bit compare-and-exchange, using the same quadword-splicing scheme
 * as __xchg_u8.  The extracted old byte is compared (cmpeq) against
 * `old`; on mismatch the beq to label 2 skips both the store and the
 * SMP "mb" — per the comment above, the barrier is only taken when
 * the store actually happens.  A failed stq_c retries from 1: via the
 * out-of-line stub at label 3.  Returns the previous byte value.
 */
static inline unsigned long
__cmpxchg_u8(volatile char *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	" andnot %5,7,%4\n"
	" insbl %1,%5,%1\n"
	"1: ldq_l %2,0(%4)\n"
	" extbl %2,%5,%0\n"
	" cmpeq %0,%6,%3\n"
	" beq %3,2f\n"
	" mskbl %2,%5,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%4)\n"
	" beq %2,3f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/*
 * 16-bit compare-and-exchange — same as __cmpxchg_u8 with the
 * word-wide insert/extract/mask forms (inswl/extwl/mskwl).
 */
static inline unsigned long
__cmpxchg_u16(volatile short *m, long old, long new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	" andnot %5,7,%4\n"
	" inswl %1,%5,%1\n"
	"1: ldq_l %2,0(%4)\n"
	" extwl %2,%5,%0\n"
	" cmpeq %0,%6,%3\n"
	" beq %3,2f\n"
	" mskwl %2,%5,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%4)\n"
	" beq %2,3f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

/*
 * 32-bit compare-and-exchange on the native longword LL/SC pair.
 * Miscompare branches straight to 2: (no store, no SMP barrier);
 * a failed stl_c retries via the out-of-line stub at 3:.
 * Returns the previous value.
 */
static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1: ldl_l %0,%5\n"
	" cmpeq %0,%3,%1\n"
	" beq %1,2f\n"
	" mov %4,%1\n"
	" stl_c %1,%2\n"
	" beq %1,3f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/*
 * 64-bit compare-and-exchange — identical to __cmpxchg_u32 but with
 * the quadword LL/SC pair (ldq_l/stq_c).
 */
static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1: ldq_l %0,%5\n"
	" cmpeq %0,%3,%1\n"
	" beq %1,2f\n"
	" mov %4,%1\n"
	" stq_c %1,%2\n"
	" beq %1,3f\n"
#ifdef CONFIG_SMP
	" mb\n"
#endif
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

/*
 * Size dispatcher for cmpxchg(); same compile-time-constant-size
 * folding trick as __xchg() above.
 */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
		case 1:
			return __cmpxchg_u8(ptr, old, new);
		case 2:
			return __cmpxchg_u16(ptr, old, new);
		case 4:
			return __cmpxchg_u32(ptr, old, new);
		case 8:
			return __cmpxchg_u64(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

/*
 * Type-preserving front end: evaluates `o` and `n` once each,
 * dispatches on the pointee size, and casts the previous value back
 * to the pointee type.  Success == (cmpxchg(...) == o).
 */
#define cmpxchg(ptr,o,n) \
  ({ \
     __typeof__(*(ptr)) _o_ = (o); \
     __typeof__(*(ptr)) _n_ = (n); \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#endif /* __ASSEMBLY__ */

/* NOTE(review): closes the header's include guard, opened before this
 * excerpt — confirm the guard macro name on page 2/top of file. */
#endif
/*
 * (Code-viewer UI residue, preserved as a comment — keyboard shortcuts:
 *  copy code Ctrl+C · search Ctrl+F · full screen F11 ·
 *  larger font Ctrl+= · smaller font Ctrl+- · show shortcuts ?)
 */