📄 processor.h
    return this_cpu(cr4);
}

static inline void write_cr4(unsigned long val)
{
    this_cpu(cr4) = val;
    asm volatile ( "mov %0,%%cr4" : : "r" (val) );
}

/* Clear and set 'TS' bit respectively */
static inline void clts(void)
{
    asm volatile ( "clts" );
}

static inline void stts(void)
{
    write_cr0(X86_CR0_TS|read_cr0());
}

/*
 * Save the cr4 feature set we're using (ie
 * Pentium 4MB enable and PPro Global page
 * enable), so that any CPU's that boot up
 * after us can get the correct flags.
 */
extern unsigned long mmu_cr4_features;

static always_inline void set_in_cr4 (unsigned long mask)
{
    mmu_cr4_features |= mask;
    write_cr4(read_cr4() | mask);
}

static always_inline void clear_in_cr4 (unsigned long mask)
{
    mmu_cr4_features &= ~mask;
    write_cr4(read_cr4() & ~mask);
}

/*
 * NSC/Cyrix CPU configuration register indexes
 */
#define CX86_PCR0 0x20
#define CX86_GCR  0xb8
#define CX86_CCR0 0xc0
#define CX86_CCR1 0xc1
#define CX86_CCR2 0xc2
#define CX86_CCR3 0xc3
#define CX86_CCR4 0xe8
#define CX86_CCR5 0xe9
#define CX86_CCR6 0xea
#define CX86_CCR7 0xeb
#define CX86_PCR1 0xf0
#define CX86_DIR0 0xfe
#define CX86_DIR1 0xff
#define CX86_ARR_BASE 0xc4
#define CX86_RCR_BASE 0xdc

/*
 * NSC/Cyrix CPU indexed register access macros
 */
#define getCx86(reg) ({ outb((reg), 0x22); inb(0x23); })

#define setCx86(reg, data) do { \
    outb((reg), 0x22);          \
    outb((data), 0x23);         \
} while (0)

/* Stop speculative execution */
static inline void sync_core(void)
{
    int tmp;
    asm volatile (
        "cpuid"
        : "=a" (tmp)
        : "0" (1)
        : "ebx", "ecx", "edx", "memory" );
}

static always_inline void __monitor(const void *eax, unsigned long ecx,
                                    unsigned long edx)
{
    /* "monitor %eax,%ecx,%edx;" */
    asm volatile (
        ".byte 0x0f,0x01,0xc8;"
        : : "a" (eax), "c" (ecx), "d" (edx) );
}

static always_inline void __mwait(unsigned long eax, unsigned long ecx)
{
    /* "mwait %eax,%ecx;" */
    asm volatile (
        ".byte 0x0f,0x01,0xc9;"
        : : "a" (eax), "c" (ecx) );
}

#define IOBMP_BYTES             8192
#define IOBMP_INVALID_OFFSET    0x8000

struct tss_struct {
    unsigned short back_link, __blh;
#ifdef __x86_64__
    union { u64 rsp0, esp0; };
    union { u64 rsp1, esp1; };
    union { u64 rsp2, esp2; };
    u64 reserved1;
    u64 ist[7];
    u64 reserved2;
    u16 reserved3;
#else
    u32 esp0;
    u16 ss0, __ss0h;
    u32 esp1;
    u16 ss1, __ss1h;
    u32 esp2;
    u16 ss2, __ss2h;
    u32 __cr3;
    u32 eip;
    u32 eflags;
    u32 eax, ecx, edx, ebx;
    u32 esp;
    u32 ebp;
    u32 esi;
    u32 edi;
    u16 es, __esh;
    u16 cs, __csh;
    u16 ss, __ssh;
    u16 ds, __dsh;
    u16 fs, __fsh;
    u16 gs, __gsh;
    u16 ldt, __ldth;
    u16 trace;
#endif
    u16 bitmap;
    /* Pads the TSS to be cacheline-aligned (total size is 0x80). */
    u8 __cacheline_filler[24];
} __cacheline_aligned __attribute__((packed));

#ifdef __x86_64__
# define IST_DF  1UL
# define IST_NMI 2UL
# define IST_MCE 3UL
# define IST_MAX 3UL
#endif

#define IDT_ENTRIES 256
extern idt_entry_t idt_table[];
extern idt_entry_t *idt_tables[];

extern struct tss_struct init_tss[NR_CPUS];

extern void init_int80_direct_trap(struct vcpu *v);

#if defined(CONFIG_X86_32)

#define set_int80_direct_trap(_ed)                  \
    (memcpy(idt_tables[(_ed)->processor] + 0x80,    \
            &((_ed)->arch.int80_desc), 8))

#else

#define set_int80_direct_trap(_ed) ((void)0)

#endif

extern int gpf_emulate_4gb(struct cpu_user_regs *regs);

extern void write_ptbase(struct vcpu *v);

void destroy_gdt(struct vcpu *d);
long set_gdt(struct vcpu *d,
             unsigned long *frames,
             unsigned int entries);

#define write_debugreg(reg, val) do {                       \
    unsigned long __val = val;                              \
    asm volatile ( "mov %0,%%db" #reg : : "r" (__val) );    \
} while (0)
#define read_debugreg(reg) ({                               \
    unsigned long __val;                                    \
    asm volatile ( "mov %%db" #reg ",%0" : "=r" (__val) );  \
    __val;                                                  \
})
long set_debugreg(struct vcpu *p, int reg, unsigned long value);

struct microcode_header {
    unsigned int hdrver;
    unsigned int rev;
    unsigned int date;
    unsigned int sig;
    unsigned int cksum;
    unsigned int ldrver;
    unsigned int pf;
    unsigned int datasize;
    unsigned int totalsize;
    unsigned int reserved[3];
};

struct microcode {
    struct microcode_header hdr;
    unsigned int bits[0];
};

typedef struct microcode microcode_t;
typedef struct microcode_header microcode_header_t;

/* microcode format is extended from prescott processors */
struct extended_signature {
    unsigned int sig;
    unsigned int pf;
    unsigned int cksum;
};

struct extended_sigtable {
    unsigned int count;
    unsigned int cksum;
    unsigned int reserved[3];
    struct extended_signature sigs[0];
};

/* REP NOP (PAUSE) is a good thing to insert into busy-wait loops. */
static always_inline void rep_nop(void)
{
    asm volatile ( "rep;nop" : : : "memory" );
}

#define cpu_relax() rep_nop()

/* Prefetch instructions for Pentium III and AMD Athlon */
#ifdef CONFIG_MPENTIUMIII

#define ARCH_HAS_PREFETCH
extern always_inline void prefetch(const void *x)
{
    asm volatile ( "prefetchnta (%0)" : : "r" (x) );
}

#elif CONFIG_X86_USE_3DNOW

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

extern always_inline void prefetch(const void *x)
{
    asm volatile ( "prefetch (%0)" : : "r" (x) );
}

extern always_inline void prefetchw(const void *x)
{
    asm volatile ( "prefetchw (%0)" : : "r" (x) );
}
#define spin_lock_prefetch(x) prefetchw(x)

#endif

void show_stack(struct cpu_user_regs *regs);
void show_stack_overflow(unsigned int cpu, unsigned long esp);
void show_registers(struct cpu_user_regs *regs);
void show_execution_state(struct cpu_user_regs *regs);
void show_page_walk(unsigned long addr);
asmlinkage void fatal_trap(int trapnr, struct cpu_user_regs *regs);

#ifdef CONFIG_COMPAT
void compat_show_guest_stack(struct cpu_user_regs *, int lines);
#else
#define compat_show_guest_stack(regs, lines) ((void)0)
#endif

extern void mtrr_ap_init(void);
extern void mtrr_bp_init(void);

void mcheck_init(struct cpuinfo_x86 *c);
asmlinkage void do_machine_check(struct cpu_user_regs *regs);

int cpuid_hypervisor_leaves(
    uint32_t idx, uint32_t *eax, uint32_t *ebx, uint32_t *ecx, uint32_t *edx);
int rdmsr_hypervisor_regs(
    uint32_t idx, uint32_t *eax, uint32_t *edx);
int wrmsr_hypervisor_regs(
    uint32_t idx, uint32_t eax, uint32_t edx);

#endif /* !__ASSEMBLY__ */

#endif /* __ASM_X86_PROCESSOR_H */

/*
 * Local variables:
 * mode: C
 * c-set-style: "BSD"
 * c-basic-offset: 4
 * tab-width: 4
 * indent-tabs-mode: nil
 * End:
 */
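
/*
 * Usage sketch (illustrative only, not part of processor.h): the bare
 * __monitor()/__mwait() wrappers above are normally paired so that a CPU
 * arms the address monitor, re-checks the wakeup condition, and only then
 * idles, while cpu_relax() is the PAUSE-based filler for plain spin loops.
 * The helper names and the 'flag' variable below are hypothetical, and the
 * sketch assumes MONITOR/MWAIT support (CPUID leaf 5) has already been
 * verified by the caller.
 */
static inline void wait_for_flag(volatile int *flag)
{
    while ( !*flag )
    {
        __monitor((const void *)flag, 0, 0); /* arm monitor on flag's cache line */
        if ( *flag )                         /* re-check to avoid a lost wakeup  */
            break;
        __mwait(0, 0);                       /* sleep until the line is written  */
    }
}

static inline void spin_until_flag(volatile int *flag)
{
    while ( !*flag )
        cpu_relax();                         /* REP;NOP eases SMT contention */
}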