x86_emulate.c
来自「xen虚拟机源代码安装包」· C语言 代码 · 共 1,781 行 · 第 1/5 页
C
1,781 行
/* Tail of _POST_EFLAGS (its opening line precedes this excerpt):       */ \
/* _sav |= EFLAGS & _msk -- fold the arithmetic flags produced by the   */ \
/* just-executed instruction back into the caller's saved-flags word.   */ \
"pushf; " \
"pop %"_tmp"; " \
"andl %"_msk",%"_LO32 _tmp"; " \
"orl %"_LO32 _tmp",%"_sav"; "

/*
 * Raw emulation: instruction has two explicit operands.
 * Dispatch on destination width (2/4/8 bytes) and execute the real
 * instruction _op on (_dst).val with source (_src).val, capturing the
 * resulting arithmetic flags into _eflags via _PRE_EFLAGS/_POST_EFLAGS.
 * (_wx,_wy), (_lx,_ly), (_qx,_qy) are the asm operand-modifier and
 * constraint strings to use for word/long/quad operands respectively.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        /* Expands to nothing on 32-bit builds (see its definition). */    \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)

/*
 * As __emulate_2op_nobyte, but additionally handles a byte-sized
 * destination (case 1) with its own (_bx,_by) modifier/constraint pair;
 * every other width is delegated to __emulate_2op_nobyte.
 */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)                  \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,                         \
                         "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). 
*/
/*
 * Execute one-operand instruction _op on (_dst).val, dispatching on the
 * destination width exactly as the two-operand helpers do, and capture
 * the resulting arithmetic flags into _eflags.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
/* NB: both 8-byte helpers deliberately use the `_tmp' local declared by */
/* the emulate_*op macro that expands them; they declare none themselves. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)            \
do{ asm volatile (                                                         \
        _PRE_EFLAGS("0","4","2")                                           \
        _op"q %"_qx"3,%1; "                                                \
        _POST_EFLAGS("0","4","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                             \
          "m" (_eflags), "m" ((_dst).val) );                               \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                            \
do{ asm volatile (                                                         \
        _PRE_EFLAGS("0","3","2")                                           \
        _op"q %1; "                                                        \
        _POST_EFLAGS("0","3","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );            \
} while (0)
#elif defined(__i386__)
/* 32-bit build: the case-8 paths expand to nothing (presumably an */
/* 8-byte destination cannot arise when emulating on i386 -- NOTE(review): */
/* confirm against the decode paths elsewhere in this file). */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. 
*/
/*
 * Fetch _size bytes at the current (virtual) instruction pointer via
 * ops->insn_fetch, advancing the shadow _regs.eip.  Raises #GP if the
 * total fetched since emulation started exceeds 15 bytes (the maximum
 * x86 instruction length).  On fetch failure, sets `rc' and jumps to
 * the enclosing function's `done' label.  Relies on locals _regs, rc,
 * ops and ctxt of the invoking function.
 */
#define insn_fetch_bytes(_size)                                            \
({ unsigned long _x = 0, _eip = _regs.eip;                                 \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */    \
   _regs.eip += (_size); /* real hardware doesn't truncate */              \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,      \
                         EXC_GP, 0);                                       \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);             \
   if ( rc ) goto done;                                                    \
   _x;                                                                     \
})
/* Type-safe wrapper: fetch sizeof(_type) bytes and cast the result. */
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/*
 * Truncate ea to byte_width bytes.  The full-word width is special-cased
 * so the mask is never built with a shift equal to the word size (which
 * would be undefined behaviour).
 */
#define truncate_word(ea, byte_width)                                      \
({ unsigned long __ea = (ea);                                              \
   unsigned int _width = (byte_width);                                     \
   ((_width == sizeof(unsigned long)) ? __ea :                             \
    (__ea & ((1UL << (_width << 3)) - 1)));                                \
})
/* Truncate an effective address to the current address size (ad_bytes). */
#define truncate_ea(ea) truncate_word((ea), ad_bytes)

/* Long mode is identified by an 8-byte default address size. */
#define mode_64bit() (def_ad_bytes == 8)

/* If p, abandon emulation: set rc and bail to the `done' label. */
#define fail_if(p)                                                         \
do {                                                                       \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;                        \
    if ( rc ) goto done;                                                   \
} while (0)

/*
 * If p, inject hardware exception e (error code ec, -1 if none) through
 * ops->inject_hw_exception and bail to `done'.  The GNU `?:' keeps a
 * non-zero rc returned by the hook, else reports X86EMUL_EXCEPTION.
 */
#define generate_exception_if(p, e, ec)                                    \
({ if ( (p) ) {                                                            \
       fail_if(ops->inject_hw_exception == NULL);                          \
       rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION;   \
       goto done;                                                          \
   }                                                                       \
})

/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 * The asm tests v against itself and stores EFLAGS.PF back into v via setp.
 */
static int even_parity(uint8_t v)
{
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}

/* Update address held in a register, based on addressing mode. 
*/
/*
 * Add `inc' to `reg' modulo the register width implied by byte_width:
 * full-width registers are updated directly; in 64-bit mode a narrower
 * address size zeroes the upper bits; otherwise the bits above the
 * addressed width are preserved unchanged.
 */
#define _register_address_increment(reg, inc, byte_width)                  \
do {                                                                       \
    int _inc = (inc); /* signed type ensures sign extension to long */     \
    unsigned int _width = (byte_width);                                    \
    if ( _width == sizeof(unsigned long) )                                 \
        (reg) += _inc;                                                     \
    else if ( mode_64bit() )                                               \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);             \
    else                                                                   \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |                  \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));           \
} while (0)
/* As above, using the current instruction's address size (ad_bytes). */
#define register_address_increment(reg, inc)                               \
    _register_address_increment((reg), (inc), ad_bytes)

/* Pre-decrement the stack pointer by `dec'; yields the new (truncated) esp. */
#define sp_pre_dec(dec) ({                                                 \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);       \
    truncate_word(_regs.esp, ctxt->sp_size/8);                             \
})
/* Post-increment: yields the old (truncated) esp, then adds `inc'. */
#define sp_post_inc(inc) ({                                                \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8);       \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);        \
    __esp;                                                                 \
})

/*
 * Relative jump: add `rel' (sign-extended) to eip, then truncate eip to
 * the current operand size outside of 64-bit mode.
 */
#define jmp_rel(rel)                                                       \
do {                                                                       \
    int _rel = (int)(rel);                                                 \
    _regs.eip += _rel;                                                     \
    if ( !mode_64bit() )                                                   \
        _regs.eip = ((op_bytes == 2)                                       \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);         \
} while (0)

/* Per-FPU-instruction state shared with the exception callback below. */
struct fpu_insn_ctxt {
    uint8_t insn_bytes; /* length of the emulated FPU insn (from 2f-1f) */
    uint8_t exn_raised; /* set by fpu_handle_exception on #MF */
};

/*
 * Callback installed via get_fpu(): record that the FPU insn faulted and
 * advance eip past it (by the length stashed in fic->insn_bytes) so
 * execution resumes after the faulting stub.
 */
static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
{
    struct fpu_insn_ctxt *fic = _fic;
    fic->exn_raised = 1;
    regs->eip += fic->insn_bytes;
}

/* Acquire the FPU of the given type, registering the exception callback. */
#define get_fpu(_type, _fic)                                               \
do{ (_fic)->exn_raised = 0;                                                \
    fail_if(ops->get_fpu == NULL);                                         \
    rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt);            \
    if ( rc ) goto done;                                                   \
} while (0)
/* Release the FPU; raise #MF if the executed insn faulted. */
#define put_fpu(_fic)                                                      \
do{                                                                        \
    if ( ops->put_fpu != NULL )                                            \
        ops->put_fpu(ctxt);                                                \
    generate_exception_if((_fic)->exn_raised, EXC_MF, -1);                 \
} while (0)

/*
 * Execute FPU insn _op directly.  `movb $2f-1f,%0' stores the byte length
 * of the insn between labels 1 and 2 into fic.insn_bytes, so the #MF
 * callback can skip it.
 */
#define emulate_fpu_insn(_op)                                              \
do{ struct fpu_insn_ctxt fic;                                              \
    get_fpu(X86EMUL_FPU_fpu, &fic);                                        \
    asm volatile (                                                         \
        "movb $2f-1f,%0 \n"                                                \
        "1: " _op " \n"                                                    \
        "2: \n"                                                            \
        : "=m" (fic.insn_bytes) : : "memory" );                            \
    put_fpu(&fic);                                                         \
} while (0)
/* As emulate_fpu_insn, with a memory destination operand _arg. */
#define emulate_fpu_insn_memdst(_op, _arg)                                 \
do{ struct fpu_insn_ctxt fic;                                              \
    get_fpu(X86EMUL_FPU_fpu, &fic);                                        \
    asm volatile (                                                         \
        "movb $2f-1f,%0 \n"                                                \
        "1: " _op " %1 \n"                                                 \
        "2: \n"                                                            \
        : "=m" (fic.insn_bytes), "=m" (_arg)                               \
        : : "memory" );                                                    \
    put_fpu(&fic);                                                         \
} while (0)
/* As emulate_fpu_insn, with a memory source operand _arg. */
#define emulate_fpu_insn_memsrc(_op, _arg)                                 \
do{ struct fpu_insn_ctxt fic;                                              \
    get_fpu(X86EMUL_FPU_fpu, &fic);                                        \
    asm volatile (                                                         \
        "movb $2f-1f,%0 \n"                                                \
        "1: " _op " %1 \n"                                                 \
        "2: \n"                                                            \
        : "=m" (fic.insn_bytes)                                            \
        : "m" (_arg) : "memory" );                                         \
    put_fpu(&fic);                                                         \
} while (0)
/*
 * Execute an arbitrary byte sequence by building it, plus a trailing
 * `ret' (0xc3), in a stack buffer and calling it.
 * NOTE(review): executes a stack buffer -- assumes the stack is
 * executable in this environment; verify against build/loader flags.
 */
#define emulate_fpu_insn_stub(_bytes...)                                   \
do{ uint8_t stub[] = { _bytes, 0xc3 };                                     \
    struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };           \
    get_fpu(X86EMUL_FPU_fpu, &fic);                                        \
    (*(void(*)(void))stub)();                                              \
    put_fpu(&fic);                                                         \
} while (0)

/*
 * Read the repetition count (ecx truncated to the address size) from the
 * shadow registers.  If it is zero, the instruction is a no-op: commit
 * the shadow eip to the external registers so it is simply skipped.
 */
static unsigned long __get_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Skip the instruction if no repetitions are required. */
    if ( ecx == 0 )
        ext_regs->eip = int_regs->eip;

    return ecx;
}

/*
 * Yields the maximum repetition count for the current instruction: 1
 * without a REP prefix, else the (truncated) ecx value.  A zero count
 * ends emulation of this instruction via `goto done'.  Relies on locals
 * rep_prefix, _regs, ctxt and ad_bytes of the invoking function.
 */
#define get_rep_prefix() ({                                                \
    unsigned long max_reps = 1;                                            \
    if ( rep_prefix )                                                      \
        max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes);         \
    if ( max_reps == 0 )                                                   \
        goto done;                                                         \
    max_reps;                                                              \
})

/*
 * Account for reps_completed iterations: decrement the (width-truncated)
 * ecx, and if repetitions remain, rewind the shadow eip to the external
 * (pre-instruction) eip so the instruction is re-entered.
 */
static void __put_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes,
    unsigned long reps_completed)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Reduce counter appropriately, and repeat instruction if non-zero. */
    ecx -= reps_completed;
    if ( ecx != 0 )
        int_regs->eip = ext_regs->eip;

    if ( ad_bytes == 2 )
        *(uint16_t *)&int_regs->ecx = ecx;
    else if ( ad_bytes == 4 )
        int_regs->ecx = (uint32_t)ecx;
    else
        int_regs->ecx = ecx;
}

/* Commit completed repetitions (no-op when there is no REP prefix). */
#define put_rep_prefix(reps_completed) ({                                  \
    if ( rep_prefix )                                                      \
        __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed);    \
})

/* Clip maximum repetitions so that the index register only just wraps. 
*/
/*
 * Compute how many repetitions of bytes_per_rep fit before the index
 * register wraps (distance measured toward zero when EF_DF is set, away
 * from the wrap point otherwise), clip `reps' to that, and yield the
 * truncated effective address.
 */
#define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({                   \
    unsigned long __todo = (ctxt->regs->eflags & EF_DF) ? (ea) : ~(ea);    \
    __todo = truncate_word(__todo, ad_bytes);                              \
    __todo = (__todo / (bytes_per_rep)) + 1;                               \
    (reps) = (__todo < (reps)) ? __todo : (reps);                          \
    truncate_word((ea), ad_bytes);                                         \
})

/* Compatibility function: read guest memory, zero-extend result to a ulong. */
static int read_ulong(
    enum x86_segment seg,
    unsigned long offset,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    /* Zero first so the ops->read of `bytes' bytes leaves the rest clear. */
    *val = 0;
    return ops->read(seg, offset, val, bytes, ctxt);
}

/*
 * Unsigned multiplication with double-word result.
 * IN: Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
⌨️ 快捷键说明
复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?