📄 uaccess.h
字号:
/*
 * i386 user-space copy helpers (copy_{to,from}_user family).
 *
 * We let the __ versions of copy_from/to_user inline, because they're
 * often used in fast paths and have only a small space overhead.
 *
 * Convention throughout this family: the return value is the number of
 * bytes that could NOT be copied (0 on complete success).  The
 * ".fixup" / "__ex_table" sections below use the kernel exception-table
 * mechanism: if a labelled instruction faults, execution resumes at the
 * paired fixup label, which converts the remaining longword count held
 * in operand %0 (ECX) back into a residual byte count.
 */

/*
 * Unchecked variants: the caller must already have validated the user
 * pointer with access_ok().  __copy_user / __copy_user_zeroing are
 * defined elsewhere in this header; presumably they update n in place
 * with the residual count, since n is returned afterwards -- confirm
 * against their definitions.
 */
static inline unsigned long
__generic_copy_from_user_nocheck(void *to, const void *from, unsigned long n)
{
	__copy_user_zeroing(to,from,n);
	return n;
}

static inline unsigned long
__generic_copy_to_user_nocheck(void *to, const void *from, unsigned long n)
{
	__copy_user(to,from,n);
	return n;
}

/*
 * Optimize just a little bit when we know the size of the move.
 *
 * Copies size bytes as (size/4) longwords via rep;movsl plus a
 * movsw/movsb tail chosen at compile time by (size & 3).  On a fault the
 * fixup rescales the remaining longword count (shl $2,%0) into bytes and
 * adds the bytes of any tail that never ran; note the fixup labels
 * deliberately fall through (e.g. "3: shl" falls into "4: incl") so a
 * fault inside rep;movsl also accounts for the pending tail.
 */
#define __constant_copy_user(to, from, size)			\
do {								\
	int __d0, __d1;						\
	switch (size & 3) {					\
	default:						\
		/* size is a multiple of 4: longwords only */	\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1:\n"					\
			".section .fixup,\"ax\"\n"		\
			"2: shl $2,%0\n"			\
			" jmp 1b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,2b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 1:							\
		/* longwords + 1 trailing byte */		\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsb\n"				\
			"2:\n"					\
			".section .fixup,\"ax\"\n"		\
			"3: shl $2,%0\n"			\
			"4: incl %0\n"				\
			" jmp 2b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,3b\n"			\
			" .long 1b,4b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 2:							\
		/* longwords + 1 trailing word */		\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsw\n"				\
			"2:\n"					\
			".section .fixup,\"ax\"\n"		\
			"3: shl $2,%0\n"			\
			"4: addl $2,%0\n"			\
			" jmp 2b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,3b\n"			\
			" .long 1b,4b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 3:							\
		/* longwords + trailing word + byte */		\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsw\n"				\
			"2: movsb\n"				\
			"3:\n"					\
			".section .fixup,\"ax\"\n"		\
			"4: shl $2,%0\n"			\
			"5: addl $2,%0\n"			\
			"6: incl %0\n"				\
			" jmp 3b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,4b\n"			\
			" .long 1b,5b\n"			\
			" .long 2b,6b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	}							\
} while (0)

/*
 * Optimize just a little bit when we know the size of the move.
 *
 * As __constant_copy_user, but used for copies FROM user space: on a
 * fault the fixup first zero-fills the uncopied remainder of the
 * destination (rep;stosl plus tail stores, with EAX saved/restored
 * around the xor) before computing the residual count, so the caller's
 * kernel buffer never ends up holding uninitialized bytes.
 */
#define __constant_copy_user_zeroing(to, from, size)		\
do {								\
	int __d0, __d1;						\
	switch (size & 3) {					\
	default:						\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1:\n"					\
			".section .fixup,\"ax\"\n"		\
			"2: pushl %0\n"				\
			" pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" rep; stosl\n"				\
			" popl %%eax\n"				\
			" popl %0\n"				\
			" shl $2,%0\n"				\
			" jmp 1b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,2b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 1:							\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsb\n"				\
			"2:\n"					\
			".section .fixup,\"ax\"\n"		\
			"3: pushl %0\n"				\
			" pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" rep; stosl\n"				\
			" stosb\n"				\
			" popl %%eax\n"				\
			" popl %0\n"				\
			" shl $2,%0\n"				\
			" incl %0\n"				\
			" jmp 2b\n"				\
			"4: pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" stosb\n"				\
			" popl %%eax\n"				\
			" incl %0\n"				\
			" jmp 2b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,3b\n"			\
			" .long 1b,4b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 2:							\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsw\n"				\
			"2:\n"					\
			".section .fixup,\"ax\"\n"		\
			"3: pushl %0\n"				\
			" pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" rep; stosl\n"				\
			" stosw\n"				\
			" popl %%eax\n"				\
			" popl %0\n"				\
			" shl $2,%0\n"				\
			" addl $2,%0\n"				\
			" jmp 2b\n"				\
			"4: pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" stosw\n"				\
			" popl %%eax\n"				\
			" addl $2,%0\n"				\
			" jmp 2b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,3b\n"			\
			" .long 1b,4b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	case 3:							\
		/*						\
		 * NOTE(review): fixups 4 and 5 below end with	\
		 * "jmp 2b", i.e. they resume at the movsb even	\
		 * though the destination tail was already	\
		 * zero-filled and %0 adjusted by 3; the	\
		 * parallel cases above jump to their end label	\
		 * instead.  Looks like these should be		\
		 * "jmp 3b" -- confirm against the upstream	\
		 * asm-i386/uaccess.h before changing.		\
		 */						\
		__asm__ __volatile__(				\
			"0: rep; movsl\n"			\
			"1: movsw\n"				\
			"2: movsb\n"				\
			"3:\n"					\
			".section .fixup,\"ax\"\n"		\
			"4: pushl %0\n"				\
			" pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" rep; stosl\n"				\
			" stosw\n"				\
			" stosb\n"				\
			" popl %%eax\n"				\
			" popl %0\n"				\
			" shl $2,%0\n"				\
			" addl $3,%0\n"				\
			" jmp 2b\n"				\
			"5: pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" stosw\n"				\
			" stosb\n"				\
			" popl %%eax\n"				\
			" addl $3,%0\n"				\
			" jmp 2b\n"				\
			"6: pushl %%eax\n"			\
			" xorl %%eax,%%eax\n"			\
			" stosb\n"				\
			" popl %%eax\n"				\
			" incl %0\n"				\
			" jmp 3b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 4\n"				\
			" .long 0b,4b\n"			\
			" .long 1b,5b\n"			\
			" .long 2b,6b\n"			\
			".previous"				\
			: "=c"(size), "=&S" (__d0), "=&D" (__d1)\
			: "1"(from), "2"(to), "0"(size/4)	\
			: "memory");				\
		break;						\
	}							\
} while (0)

/* Out-of-line generic copies, defined in arch lib code. */
unsigned long __generic_copy_to_user(void *, const void *, unsigned long);
unsigned long __generic_copy_from_user(void *, const void *, unsigned long);

/*
 * Checked constant-size copy to user space.  prefetch() warms the source
 * cache line before the access_ok() range check; on a failed check the
 * copy is skipped and the full n is returned as "not copied".
 */
static inline unsigned long
__constant_copy_to_user(void *to, const void *from, unsigned long n)
{
	prefetch(from);
	if (access_ok(VERIFY_WRITE, to, n))
		__constant_copy_user(to,from,n);
	return n;
}

/*
 * Checked constant-size copy from user space.  If the range check fails,
 * the destination is fully zeroed (matching the zeroing the fixup code
 * performs on a mid-copy fault) and n is returned.
 */
static inline unsigned long
__constant_copy_from_user(void *to, const void *from, unsigned long n)
{
	if (access_ok(VERIFY_READ, from, n))
		__constant_copy_user_zeroing(to,from,n);
	else
		memset(to, 0, n);
	return n;
}

/* Unchecked constant-size variants: caller has done access_ok(). */
static inline unsigned long
__constant_copy_to_user_nocheck(void *to, const void *from, unsigned long n)
{
	__constant_copy_user(to,from,n);
	return n;
}

static inline unsigned long
__constant_copy_from_user_nocheck(void *to, const void *from, unsigned long n)
{
	__constant_copy_user_zeroing(to,from,n);
	return n;
}

/*
 * Public entry points: dispatch at compile time.  When n is a
 * compile-time constant, use the unrolled __constant_* versions;
 * otherwise fall back to the out-of-line generic routines.
 */
#define copy_to_user(to,from,n)					\
	(__builtin_constant_p(n) ?				\
	 __constant_copy_to_user((to),(from),(n)) :		\
	 __generic_copy_to_user((to),(from),(n)))

#define copy_from_user(to,from,n)				\
	(__builtin_constant_p(n) ?				\
	 __constant_copy_from_user((to),(from),(n)) :		\
	 __generic_copy_from_user((to),(from),(n)))

/* __ versions: no access_ok() check; caller is responsible. */
#define __copy_to_user(to,from,n)				\
	(__builtin_constant_p(n) ?				\
	 __constant_copy_to_user_nocheck((to),(from),(n)) :	\
	 __generic_copy_to_user_nocheck((to),(from),(n)))

#define __copy_from_user(to,from,n)				\
	(__builtin_constant_p(n) ?				\
	 __constant_copy_from_user_nocheck((to),(from),(n)) :	\
	 __generic_copy_from_user_nocheck((to),(from),(n)))

/* User-space string helpers, defined out of line. */
long strncpy_from_user(char *dst, const char *src, long count);
long __strncpy_from_user(char *dst, const char *src, long count);

/* strlen_user: bound the scan by the largest positive long. */
#define strlen_user(str) strnlen_user(str, ~0UL >> 1)

long strnlen_user(const char *str, long n);
unsigned long clear_user(void *mem, unsigned long len);
unsigned long __clear_user(void *mem, unsigned long len);

#endif /* __i386_UACCESS_H */
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -