📄 sysdep.h
字号:
/* Pop the fifth syscall-argument register (%edi), with CFI unwind
   annotations kept in sync.  Builds on _POPARGS_4, defined earlier in
   this file (not visible in this chunk).  */
#define _POPARGS_5	_POPARGS_4; popl %edi; cfi_adjust_cfa_offset (-4); \
			cfi_restore (edi); L(POPDI1):

/* Six-argument syscalls: the sixth argument travels in %ebp, which must
   be saved/restored around the call.  DOARGS_6's operand 40 is the stack
   offset of the first argument after the pushes — TODO(review) confirm
   against the _DOARGS_5 definition earlier in the file.  */
#define PUSHARGS_6	_PUSHARGS_6
#define DOARGS_6	_DOARGS_6 (40)
#define POPARGS_6	_POPARGS_6
#define _PUSHARGS_6	pushl %ebp; cfi_adjust_cfa_offset (4); \
			cfi_rel_offset (ebp, 0); L(PUSHBP1): _PUSHARGS_5
#define _DOARGS_6(n)	movl n(%esp), %ebp; _DOARGS_5 (n-4)
#define _POPARGS_6	_POPARGS_5; popl %ebp; cfi_adjust_cfa_offset (-4); \
			cfi_restore (ebp); L(POPBP1):

#else	/* !__ASSEMBLER__ */

/* We need some help from the assembler to generate optimal code.  We
   define some macros here which later will be used.

   Each register gets a numeric class symbol (.L__X'%reg); the bpushl /
   bpopl assembler macros below test that class and, for class-2
   registers (%ecx/%edx), exchange the register with %ebx.  Class 1
   (%ebx itself) expands to nothing; any other class triggers the
   undefined "error" mnemonic so the build fails loudly.  This lets the
   PIC LOADARGS_1/RESTOREARGS_1 macros move a syscall argument into
   %ebx without unconditionally clobbering it.  */
asm (".L__X'%ebx = 1\n\t"
     ".L__X'%ecx = 2\n\t"
     ".L__X'%edx = 2\n\t"
     ".L__X'%eax = 3\n\t"
     ".L__X'%esi = 3\n\t"
     ".L__X'%edi = 3\n\t"
     ".L__X'%ebp = 3\n\t"
     ".L__X'%esp = 3\n\t"
     ".macro bpushl name reg\n\t"
     ".if 1 - \\name\n\t"
     ".if 2 - \\name\n\t"
     "error\n\t"		/* Unknown register class: force an assembler error.  */
     ".else\n\t"
     "xchgl \\reg, %ebx\n\t"
     ".endif\n\t"
     ".endif\n\t"
     ".endm\n\t"
     ".macro bpopl name reg\n\t"
     ".if 1 - \\name\n\t"
     ".if 2 - \\name\n\t"
     "error\n\t"
     ".else\n\t"
     "xchgl \\reg, %ebx\n\t"	/* Undo the earlier exchange.  */
     ".endif\n\t"
     ".endif\n\t"
     ".endm\n\t");

/* Define a macro which expands inline into the wrapper code for a
   system call.  On error (see INTERNAL_SYSCALL_ERROR_P) it stores the
   errno value and yields -1 (0xffffffff) as an int.  */
#undef INLINE_SYSCALL
#define INLINE_SYSCALL(name, nr, args...) \
  ({									      \
    unsigned int resultvar = INTERNAL_SYSCALL (name, , nr, args);	      \
    if (__builtin_expect (INTERNAL_SYSCALL_ERROR_P (resultvar, ), 0))	      \
      {									      \
	__set_errno (INTERNAL_SYSCALL_ERRNO (resultvar, ));		      \
	resultvar = 0xffffffff;						      \
      }									      \
    (int) resultvar; })

/* Define a macro which expands inline into the wrapper code for a
   system call.  This use is for internal calls that do not need to
   handle errors normally.  It will never touch errno.  This returns
   just what the kernel gave back.

   The _NCS variant allows non-constant syscall numbers but it is not
   possible to use more than four parameters.

   Three entry mechanisms, selected at compile time:
     - I386_USE_SYSENTER && SHARED: indirect call through the sysinfo
       slot of the TCB (%gs:offsetof(tcbhead_t, sysinfo));
     - I386_USE_SYSENTER, not SHARED: indirect call through _dl_sysinfo;
     - otherwise: the classic "int $0x80" trap.
   In every variant the syscall number ends up in %eax ("movl %1, %%eax"
   for the constant form, input constraint "0" for the _NCS form) and
   the result comes back in %eax ("=a").  */
#undef INTERNAL_SYSCALL
#ifdef I386_USE_SYSENTER
# ifdef SHARED
# define INTERNAL_SYSCALL(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "movl %1, %%eax\n\t"						      \
    "call *%%gs:%P2\n\t"						      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "i" (__NR_##name), "i" (offsetof (tcbhead_t, sysinfo))		      \
      ASMFMT_##nr(args) : "memory", "cc");				      \
    (int) resultvar; })
# define INTERNAL_SYSCALL_NCS(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "call *%%gs:%P2\n\t"						      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "0" (name), "i" (offsetof (tcbhead_t, sysinfo))			      \
      ASMFMT_##nr(args) : "memory", "cc");				      \
    (int) resultvar; })
# else
# define INTERNAL_SYSCALL(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "movl %1, %%eax\n\t"						      \
    "call *_dl_sysinfo\n\t"						      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "i" (__NR_##name) ASMFMT_##nr(args) : "memory", "cc");		      \
    (int) resultvar; })
# define INTERNAL_SYSCALL_NCS(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "call *_dl_sysinfo\n\t"						      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "0" (name) ASMFMT_##nr(args) : "memory", "cc");			      \
    (int) resultvar; })
# endif
#else
# define INTERNAL_SYSCALL(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "movl %1, %%eax\n\t"						      \
    "int $0x80\n\t"							      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "i" (__NR_##name) ASMFMT_##nr(args) : "memory", "cc");		      \
    (int) resultvar; })
# define INTERNAL_SYSCALL_NCS(name, err, nr, args...) \
  ({									      \
    register unsigned int resultvar;					      \
    EXTRAVAR_##nr							      \
    asm volatile (							      \
    LOADARGS_##nr							      \
    "int $0x80\n\t"							      \
    RESTOREARGS_##nr							      \
    : "=a" (resultvar)							      \
    : "0" (name) ASMFMT_##nr(args) : "memory", "cc");			      \
    (int) resultvar; })
#endif

/* No per-call error state is needed on i386; the declaration is a
   no-op statement.  */
#undef INTERNAL_SYSCALL_DECL
#define INTERNAL_SYSCALL_DECL(err) do { } while (0)

/* The kernel signals failure by returning a value in the range
   -4095..-1, i.e. (unsigned) >= 0xfffff001.  */
#undef INTERNAL_SYSCALL_ERROR_P
#define INTERNAL_SYSCALL_ERROR_P(val, err) \
  ((unsigned int) (val) >= 0xfffff001u)

/* On error the return value is the negated errno code.  */
#undef INTERNAL_SYSCALL_ERRNO
#define INTERNAL_SYSCALL_ERRNO(val, err)	(-(val))

/* LOADARGS_n / RESTOREARGS_n bracket the kernel entry in the asm
   templates above.  They exist only for PIC, where %ebx must be
   preserved around the syscall (the first syscall argument lives in
   %ebx).  Operand numbers differ between the sysenter+SHARED templates
   (which carry an extra "i" operand for the sysinfo offset, shifting
   the argument operands by one) and the other templates; hence the
   %k3/%k2 and %4/%3 pairs below.  For nr == 3 or 4 the first argument
   is received in %edi (see ASMFMT_3/4) and simply exchanged with %ebx;
   for nr == 5, %ebx is spilled to the in-memory temporary _xv (see
   EXTRAVAR_5 / ASMFMT_5).  */
#define LOADARGS_0
#ifdef __PIC__
# if defined I386_USE_SYSENTER && defined SHARED
# define LOADARGS_1 \
    "bpushl .L__X'%k3, %k3\n\t"
# define LOADARGS_5 \
    "movl %%ebx, %4\n\t" \
    "movl %3, %%ebx\n\t"
# else
# define LOADARGS_1 \
    "bpushl .L__X'%k2, %k2\n\t"
# define LOADARGS_5 \
    "movl %%ebx, %3\n\t" \
    "movl %2, %%ebx\n\t"
# endif
# define LOADARGS_2	LOADARGS_1
# define LOADARGS_3 \
    "xchgl %%ebx, %%edi\n\t"
# define LOADARGS_4	LOADARGS_3
#else
# define LOADARGS_1
# define LOADARGS_2
# define LOADARGS_3
# define LOADARGS_4
# define LOADARGS_5
#endif

#define RESTOREARGS_0
#ifdef __PIC__
# if defined I386_USE_SYSENTER && defined SHARED
# define RESTOREARGS_1 \
    "bpopl .L__X'%k3, %k3\n\t"
# define RESTOREARGS_5 \
    "movl %4, %%ebx"
# else
# define RESTOREARGS_1 \
    "bpopl .L__X'%k2, %k2\n\t"
# define RESTOREARGS_5 \
    "movl %3, %%ebx"
# endif
# define RESTOREARGS_2	RESTOREARGS_1
# define RESTOREARGS_3 \
    "xchgl %%edi, %%ebx\n\t"
# define RESTOREARGS_4	RESTOREARGS_3
#else
# define RESTOREARGS_1
# define RESTOREARGS_2
# define RESTOREARGS_3
# define RESTOREARGS_4
# define RESTOREARGS_5
#endif

/* ASMFMT_n supplies the input-operand constraints that place up to five
   syscall arguments in the kernel's expected registers
   (ebx, ecx, edx, esi, edi).  Non-PIC code can use "b" (%ebx) directly.
   PIC code cannot ask the compiler for %ebx, so:
     nr 1-2: arg1 arrives in %ecx or %edx ("cd"/"d") and is swapped into
             %ebx by bpushl (see LOADARGS_1/2);
     nr 3-4: arg1 arrives in %edi ("D") and is exchanged with %ebx;
     nr 5:   arg1 is tied to the output register %eax ("0") and a memory
             slot "m" (_xv) is provided for saving %ebx.  */
#define ASMFMT_0()
#ifdef __PIC__
# define ASMFMT_1(arg1) \
	, "cd" (arg1)
# define ASMFMT_2(arg1, arg2) \
	, "d" (arg1), "c" (arg2)
# define ASMFMT_3(arg1, arg2, arg3) \
	, "D" (arg1), "c" (arg2), "d" (arg3)
# define ASMFMT_4(arg1, arg2, arg3, arg4) \
	, "D" (arg1), "c" (arg2), "d" (arg3), "S" (arg4)
# define ASMFMT_5(arg1, arg2, arg3, arg4, arg5) \
	, "0" (arg1), "m" (_xv), "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
#else
# define ASMFMT_1(arg1) \
	, "b" (arg1)
# define ASMFMT_2(arg1, arg2) \
	, "b" (arg1), "c" (arg2)
# define ASMFMT_3(arg1, arg2, arg3) \
	, "b" (arg1), "c" (arg2), "d" (arg3)
# define ASMFMT_4(arg1, arg2, arg3, arg4) \
	, "b" (arg1), "c" (arg2), "d" (arg3), "S" (arg4)
# define ASMFMT_5(arg1, arg2, arg3, arg4, arg5) \
	, "b" (arg1), "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
#endif

/* EXTRAVAR_n declares any local temporary the nr-argument expansion
   needs inside the statement expression.  Only the PIC five-argument
   case needs one: _xv, the %ebx spill slot referenced by ASMFMT_5.  */
#define EXTRAVAR_0
#define EXTRAVAR_1
#define EXTRAVAR_2
#define EXTRAVAR_3
#define EXTRAVAR_4
#ifdef __PIC__
# define EXTRAVAR_5 int _xv;
#else
# define EXTRAVAR_5
#endif

/* Consistency check for position-independent code: recompute the GOT
   address into %ecx via a get-pc thunk and compare it with %ebx; if
   they differ, execute ud2 (invalid opcode) to trap immediately.  The
   thunk is emitted in a linkonce section so multiple uses collapse to
   one copy.  */
#ifdef __PIC__
# define check_consistency()						      \
  ({ int __res;								      \
     __asm__ __volatile__						      \
       ("call __i686.get_pc_thunk.cx;"					      \
	"addl $_GLOBAL_OFFSET_TABLE_, %%ecx;"				      \
	"subl %%ebx, %%ecx;"						      \
	"je 1f;"							      \
	"ud2;"								      \
	"1:\n"								      \
	".section .gnu.linkonce.t.__i686.get_pc_thunk.cx,\"ax\",@progbits;"   \
	".globl __i686.get_pc_thunk.cx;"				      \
	".hidden __i686.get_pc_thunk.cx;"				      \
	".type __i686.get_pc_thunk.cx,@function;"			      \
	"__i686.get_pc_thunk.cx:"					      \
	"movl (%%esp), %%ecx;"						      \
	"ret;"								      \
	".previous"							      \
	: "=c" (__res));						      \
     __res; })
#endif

#endif /* __ASSEMBLER__ */

/* Pointer mangling support: stored code pointers are obscured by
   XORing with the per-thread pointer_guard value (reached through %gs)
   and rotating left by 9 bits; demangling applies the inverse rotation
   first, then the XOR.  */
#if defined NOT_IN_libc && defined IS_IN_rtld
/* We cannot use the thread descriptor because in ld.so we use setjmp
   earlier than the descriptor is initialized.  Using a global variable
   is too complicated here since we have no PC-relative addressing mode.  */
#else
# ifdef __ASSEMBLER__
# define PTR_MANGLE(reg)	xorl %gs:POINTER_GUARD, reg; \
				roll $9, reg
# define PTR_DEMANGLE(reg)	rorl $9, reg; \
				xorl %gs:POINTER_GUARD, reg
# else
# define PTR_MANGLE(var)	asm ("xorl %%gs:%c2, %0\n"		      \
				     "roll $9, %0"			      \
				     : "=r" (var)			      \
				     : "0" (var),			      \
				       "i" (offsetof (tcbhead_t,	      \
						      pointer_guard)))
# define PTR_DEMANGLE(var)	asm ("rorl $9, %0\n"			      \
				     "xorl %%gs:%c2, %0"		      \
				     : "=r" (var)			      \
				     : "0" (var),			      \
				       "i" (offsetof (tcbhead_t,	      \
						      pointer_guard)))
# endif
#endif

#endif /* linux/i386/sysdep.h */
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -