privop.c
/*
 * Privileged operation "API" handling functions.
 *
 * Copyright (C) 2004 Hewlett-Packard Co.
 *     Dan Magenheimer (dan.magenheimer@hp.com)
 *
 */

#include <asm/privop.h>
#include <asm/vcpu.h>
#include <asm/processor.h>
#include <asm/delay.h>	// Debug only
#include <asm/dom_fw.h>
#include <asm/vhpt.h>
#include <asm/bundle.h>
#include <asm/debugger.h>
#include <xen/perfc.h>

static const long priv_verbose = 0;

/* Set to 1 to handle privified instructions from the privify tool. */
#ifndef CONFIG_PRIVIFY
static const int privify_en = 0;
#else
static const int privify_en = 1;
#endif

/**************************************************************************
Privileged operation emulation routines
**************************************************************************/

static IA64FAULT priv_rfi(VCPU * vcpu, INST64 inst)
{
    REGS *regs = vcpu_regs(vcpu);

    if (PSCB(vcpu, ifs) > 0x8000000000000000UL
        && regs->cr_ifs > 0x8000000000000000UL) {
        panic_domain(regs, "rfi emulation with double uncover is "
                     "impossible - use hyperprivop\n"
                     " ip=0x%lx vifs=0x%lx ifs=0x%lx\n",
                     regs->cr_iip, PSCB(vcpu, ifs), regs->cr_ifs);
    }
    return vcpu_rfi(vcpu);
}

static IA64FAULT priv_bsw0(VCPU * vcpu, INST64 inst)
{
    return vcpu_bsw0(vcpu);
}

static IA64FAULT priv_bsw1(VCPU * vcpu, INST64 inst)
{
    return vcpu_bsw1(vcpu);
}

static IA64FAULT priv_cover(VCPU * vcpu, INST64 inst)
{
    return vcpu_cover(vcpu);
}

static IA64FAULT priv_ptc_l(VCPU * vcpu, INST64 inst)
{
    u64 vadr = vcpu_get_gr(vcpu, inst.M45.r3);
    u64 log_range;

    log_range = ((vcpu_get_gr(vcpu, inst.M45.r2) & 0xfc) >> 2);
    return vcpu_ptc_l(vcpu, vadr, log_range);
}

static IA64FAULT priv_ptc_e(VCPU * vcpu, INST64 inst)
{
    unsigned int src = inst.M28.r3;

    // NOTE: ptc_e with source gr > 63 is emulated as a fc r(y-64)
    if (privify_en && src > 63)
        return vcpu_fc(vcpu, vcpu_get_gr(vcpu, src - 64));
    return vcpu_ptc_e(vcpu, vcpu_get_gr(vcpu, src));
}

static IA64FAULT priv_ptc_g(VCPU * vcpu, INST64 inst)
{
    u64 vadr = vcpu_get_gr(vcpu, inst.M45.r3);
    u64 addr_range;

    addr_range = 1 << ((vcpu_get_gr(vcpu, inst.M45.r2) & 0xfc) >> 2);
    return vcpu_ptc_g(vcpu, vadr, addr_range);
}

static IA64FAULT priv_ptc_ga(VCPU * vcpu, INST64 inst)
{
    u64 vadr = vcpu_get_gr(vcpu, inst.M45.r3);
    u64 addr_range;

    addr_range = 1 << ((vcpu_get_gr(vcpu, inst.M45.r2) & 0xfc) >> 2);
    return vcpu_ptc_ga(vcpu, vadr, addr_range);
}

static IA64FAULT priv_ptr_d(VCPU * vcpu, INST64 inst)
{
    u64 vadr = vcpu_get_gr(vcpu, inst.M45.r3);
    u64 log_range;

    log_range = (vcpu_get_gr(vcpu, inst.M45.r2) & 0xfc) >> 2;
    return vcpu_ptr_d(vcpu, vadr, log_range);
}

static IA64FAULT priv_ptr_i(VCPU * vcpu, INST64 inst)
{
    u64 vadr = vcpu_get_gr(vcpu, inst.M45.r3);
    u64 log_range;

    log_range = (vcpu_get_gr(vcpu, inst.M45.r2) & 0xfc) >> 2;
    return vcpu_ptr_i(vcpu, vadr, log_range);
}

static IA64FAULT priv_tpa(VCPU * vcpu, INST64 inst)
{
    u64 padr;
    unsigned int fault;
    unsigned int src = inst.M46.r3;

    // NOTE: tpa with source gr > 63 is emulated as a ttag rx=r(y-64)
    if (privify_en && src > 63)
        fault = vcpu_ttag(vcpu, vcpu_get_gr(vcpu, src - 64), &padr);
    else
        fault = vcpu_tpa(vcpu, vcpu_get_gr(vcpu, src), &padr);
    if (fault == IA64_NO_FAULT)
        return vcpu_set_gr(vcpu, inst.M46.r1, padr, 0);
    else
        return fault;
}

static IA64FAULT priv_tak(VCPU * vcpu, INST64 inst)
{
    u64 key;
    unsigned int fault;
    unsigned int src = inst.M46.r3;

    // NOTE: tak with source gr > 63 is emulated as a thash rx=r(y-64)
    if (privify_en && src > 63)
        fault = vcpu_thash(vcpu, vcpu_get_gr(vcpu, src - 64), &key);
    else
        fault = vcpu_tak(vcpu, vcpu_get_gr(vcpu, src), &key);
    if (fault == IA64_NO_FAULT)
        return vcpu_set_gr(vcpu, inst.M46.r1, key, 0);
    else
        return fault;
}
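/*
 * NOTE: the handlers above share one convention when privify_en is set:
 * an encoded source GR number greater than 63 marks an instruction that
 * the privify tool rewrote, and the handler emulates the substituted
 * operation (fc, ttag, thash, ...) on r(src - 64) instead of the
 * original privileged instruction.
 */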
/************************************
 * Insert translation register/cache
 ************************************/

static IA64FAULT priv_itr_d(VCPU * vcpu, INST64 inst)
{
    u64 fault, itir, ifa, pte, slot;

    //if (!vcpu_get_psr_ic(vcpu))
    //	return IA64_ILLOP_FAULT;
    fault = vcpu_get_itir(vcpu, &itir);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    fault = vcpu_get_ifa(vcpu, &ifa);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    pte = vcpu_get_gr(vcpu, inst.M42.r2);
    slot = vcpu_get_gr(vcpu, inst.M42.r3);

    return vcpu_itr_d(vcpu, slot, pte, itir, ifa);
}

static IA64FAULT priv_itr_i(VCPU * vcpu, INST64 inst)
{
    u64 fault, itir, ifa, pte, slot;

    //if (!vcpu_get_psr_ic(vcpu)) return IA64_ILLOP_FAULT;
    fault = vcpu_get_itir(vcpu, &itir);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    fault = vcpu_get_ifa(vcpu, &ifa);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    pte = vcpu_get_gr(vcpu, inst.M42.r2);
    slot = vcpu_get_gr(vcpu, inst.M42.r3);

    return vcpu_itr_i(vcpu, slot, pte, itir, ifa);
}

static IA64FAULT priv_itc_d(VCPU * vcpu, INST64 inst)
{
    u64 fault, itir, ifa, pte;

    //if (!vcpu_get_psr_ic(vcpu)) return IA64_ILLOP_FAULT;
    fault = vcpu_get_itir(vcpu, &itir);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    fault = vcpu_get_ifa(vcpu, &ifa);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    pte = vcpu_get_gr(vcpu, inst.M41.r2);

    return vcpu_itc_d(vcpu, pte, itir, ifa);
}

static IA64FAULT priv_itc_i(VCPU * vcpu, INST64 inst)
{
    u64 fault, itir, ifa, pte;

    //if (!vcpu_get_psr_ic(vcpu)) return IA64_ILLOP_FAULT;
    fault = vcpu_get_itir(vcpu, &itir);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    fault = vcpu_get_ifa(vcpu, &ifa);
    if (fault != IA64_NO_FAULT)
        return IA64_ILLOP_FAULT;
    pte = vcpu_get_gr(vcpu, inst.M41.r2);

    return vcpu_itc_i(vcpu, pte, itir, ifa);
}

/*************************************
 * Moves to semi-privileged registers
 *************************************/

static IA64FAULT priv_mov_to_ar_imm(VCPU * vcpu, INST64 inst)
{
    // I27 and M30 are identical for these fields
    u64 ar3 = inst.M30.ar3;
    u64 imm = vcpu_get_gr(vcpu, inst.M30.imm);

    return vcpu_set_ar(vcpu, ar3, imm);
}

static IA64FAULT priv_mov_to_ar_reg(VCPU * vcpu, INST64 inst)
{
    // I26 and M29 are identical for these fields
    u64 ar3 = inst.M29.ar3;

    if (privify_en && inst.M29.r2 > 63 && inst.M29.ar3 < 8) {
        // privified mov from kr
        u64 val;
        if (vcpu_get_ar(vcpu, ar3, &val) != IA64_ILLOP_FAULT)
            return vcpu_set_gr(vcpu, inst.M29.r2 - 64, val, 0);
        else
            return IA64_ILLOP_FAULT;
    } else {
        u64 r2 = vcpu_get_gr(vcpu, inst.M29.r2);
        return vcpu_set_ar(vcpu, ar3, r2);
    }
}

/********************************
 * Moves to privileged registers
 ********************************/

static IA64FAULT priv_mov_to_pkr(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_pkr(vcpu, r3, r2);
}

static IA64FAULT priv_mov_to_rr(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_rr(vcpu, r3, r2);
}

static IA64FAULT priv_mov_to_dbr(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_dbr(vcpu, r3, r2);
}

static IA64FAULT priv_mov_to_ibr(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_ibr(vcpu, r3, r2);
}

static IA64FAULT priv_mov_to_pmc(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_pmc(vcpu, r3, r2);
}

static IA64FAULT priv_mov_to_pmd(VCPU * vcpu, INST64 inst)
{
    u64 r3 = vcpu_get_gr(vcpu, inst.M42.r3);
    u64 r2 = vcpu_get_gr(vcpu, inst.M42.r2);

    return vcpu_set_pmd(vcpu, r3, r2);
}
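/*
 * mov-to-cr dispatch below: the case labels are IA-64 control register
 * numbers (cr0=dcr, cr1=itm, cr2=iva, cr8=pta, cr16=ipsr, ..., cr80/81=lrr0/1),
 * each forwarded to the matching vcpu_set_* routine. The read-only
 * registers -- cr65 (ivr) and cr68-cr71 (irr0-3) -- reject the write
 * with IA64_ILLOP_FAULT.
 */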
static IA64FAULT priv_mov_to_cr(VCPU * vcpu, INST64 inst)
{
    u64 val = vcpu_get_gr(vcpu, inst.M32.r2);

    perfc_incra(mov_to_cr, inst.M32.cr3);
    switch (inst.M32.cr3) {
    case 0:
        return vcpu_set_dcr(vcpu, val);
    case 1:
        return vcpu_set_itm(vcpu, val);
    case 2:
        return vcpu_set_iva(vcpu, val);
    case 8:
        return vcpu_set_pta(vcpu, val);
    case 16:
        return vcpu_set_ipsr(vcpu, val);
    case 17:
        return vcpu_set_isr(vcpu, val);
    case 19:
        return vcpu_set_iip(vcpu, val);
    case 20:
        return vcpu_set_ifa(vcpu, val);
    case 21:
        return vcpu_set_itir(vcpu, val);
    case 22:
        return vcpu_set_iipa(vcpu, val);
    case 23:
        return vcpu_set_ifs(vcpu, val);
    case 24:
        return vcpu_set_iim(vcpu, val);
    case 25:
        return vcpu_set_iha(vcpu, val);
    case 64:
        return vcpu_set_lid(vcpu, val);
    case 65:
        return IA64_ILLOP_FAULT;
    case 66:
        return vcpu_set_tpr(vcpu, val);
    case 67:
        return vcpu_set_eoi(vcpu, val);
    case 68:
        return IA64_ILLOP_FAULT;
    case 69:
        return IA64_ILLOP_FAULT;
    case 70:
        return IA64_ILLOP_FAULT;
    case 71:
        return IA64_ILLOP_FAULT;
    case 72:
        return vcpu_set_itv(vcpu, val);
    case 73:
        return vcpu_set_pmv(vcpu, val);
    case 74:
        return vcpu_set_cmcv(vcpu, val);
    case 80:
        return vcpu_set_lrr0(vcpu, val);
    case 81:
        return vcpu_set_lrr1(vcpu, val);
    default:
        return IA64_ILLOP_FAULT;
    }
}

static IA64FAULT priv_rsm(VCPU * vcpu, INST64 inst)
{
    u64 imm24 = (inst.M44.i << 23) | (inst.M44.i2 << 21) | inst.M44.imm;

    return vcpu_reset_psr_sm(vcpu, imm24);
}

static IA64FAULT priv_ssm(VCPU * vcpu, INST64 inst)
{
    u64 imm24 = (inst.M44.i << 23) | (inst.M44.i2 << 21) | inst.M44.imm;

    return vcpu_set_psr_sm(vcpu, imm24);
}

/**
 * @todo Check for reserved bits and return IA64_RSVDREG_FAULT.
 */
static IA64FAULT priv_mov_to_psr(VCPU * vcpu, INST64 inst)
{
    u64 val = vcpu_get_gr(vcpu, inst.M35.r2);

    return vcpu_set_psr_l(vcpu, val);
}

/**********************************
 * Moves from privileged registers
 **********************************/

static IA64FAULT priv_mov_from_rr(VCPU * vcpu, INST64 inst)
{
    u64 val;
    IA64FAULT fault;
    u64 reg;

    reg = vcpu_get_gr(vcpu, inst.M43.r3);
    if (privify_en && inst.M43.r1 > 63) {
        // privified mov from cpuid
        fault = vcpu_get_cpuid(vcpu, reg, &val);
        if (fault == IA64_NO_FAULT)
            return vcpu_set_gr(vcpu, inst.M43.r1 - 64, val, 0);
    } else {
        fault = vcpu_get_rr(vcpu, reg, &val);
        if (fault == IA64_NO_FAULT)
            return vcpu_set_gr(vcpu, inst.M43.r1, val, 0);
    }
    return fault;
}

static IA64FAULT priv_mov_from_pkr(VCPU * vcpu, INST64 inst)
{
    u64 val;
    IA64FAULT fault;

    fault = vcpu_get_pkr(vcpu, vcpu_get_gr(vcpu, inst.M43.r3), &val);
    if (fault == IA64_NO_FAULT)
        return vcpu_set_gr(vcpu, inst.M43.r1, val, 0);
    else
        return fault;
}

static IA64FAULT priv_mov_from_dbr(VCPU * vcpu, INST64 inst)
{
    u64 val;
    IA64FAULT fault;

    fault = vcpu_get_dbr(vcpu, vcpu_get_gr(vcpu, inst.M43.r3), &val);
    if (fault == IA64_NO_FAULT)
        return vcpu_set_gr(vcpu, inst.M43.r1, val, 0);
    else
        return fault;
}

static IA64FAULT priv_mov_from_ibr(VCPU * vcpu, INST64 inst)
{
    u64 val;
    IA64FAULT fault;

    fault = vcpu_get_ibr(vcpu, vcpu_get_gr(vcpu, inst.M43.r3), &val);
    if (fault == IA64_NO_FAULT)
        return vcpu_set_gr(vcpu, inst.M43.r1, val, 0);
    else
        return fault;
}

static IA64FAULT priv_mov_from_pmc(VCPU * vcpu, INST64 inst)
{
    u64 val;
    IA64FAULT fault;
    u64 reg;