
📄 jitregman.c

📁 This resource is based on embedded J2ME. If you don't understand it, you are welcome to contact me.
💻 C
📖 Page 1 of 5
	    CVMconsolePrintf("64bit!\n");	    CVMCPUemitBranch(cc, CVMJITcbufGetLogicalPC(con) + 			     2*CVMCPU_INSTRUCTION_SIZE,			     CVMCPU_COND_NE);	    CVMCPUemitCompareRegister(cc, CVMCPU_CMP_OPCODE,		CVMCPU_COND_NE, CVMCPU_JSP_REG, rp->regno+1);	}	CVMJITaddCodegenComment((cc, "call CVMsystemPanic"));	CVMJITsetSymbolName((cc, "CVMsystemPanic"));	CVMCPUemitAbsoluteCallConditional(cc, CVMsystemPanic, 	    CVMJIT_CPDUMPOK, CVMJIT_CPBRANCHOK, CVMCPU_COND_NE);	/* restore JFP and JSP */	CVMCPUemitCCEEReferenceImmediate(cc, CVMCPU_LDR32_OPCODE,	    CVMCPU_JFP_REG, offsetof(CVMCCExecEnv, ccmStorage));	if (rp->size == 2) {	    CVMCPUemitCCEEReferenceImmediate(cc, CVMCPU_LDR32_OPCODE,		CVMCPU_JSP_REG,		offsetof(CVMCCExecEnv, ccmStorage) + sizeof(CVMUint32));	}#endif    } else if (!CVMRMisConstant(rp)) {	/* spill to temp location */	CVMJITaddCodegenComment((cc, CVMRMisRef(rp)?"REF spill":"spill"));	if (rp->spillLoc < 0){	    rp->spillLoc = findSpillLoc(cc, rp->size, CVMRMisRef(rp));	}	CVMCPUemitFrameReference(cc, RM_STORE_OPCODE(con, rp->size), rp->regno,	    CVMCPU_FRAME_TEMP, rp->spillLoc);    }}static voidspillRegister(CVMJITRMContext* con, int regNo, CVMBool evict){    CVMRMResource* rp            = con->reg2res[regNo];    CVMassert(rp != NULL);    CVMassert(!(rp->flags & CVMRMpinned));    if (rp->flags & CVMRMdirty) {	flushResource(con, rp);	rp->flags ^= CVMRMdirty;    }    if (evict) {        /* Disassociate the register from the resource: */        con->occupiedRegisters &= ~rp->rmask; /* Release the register. */	con->reg2res[rp->regno] = NULL;	if (rp->nregs > 1){	    CVMassert(rp->nregs == 2);	    con->reg2res[rp->regno + 1] = NULL;	}	rp->regno = -1;    }}/* Purpose: Finds an available register that fits the specified profile as            defined by the various parameters. */static int findAvailableRegister0(    CVMJITRMContext* con,    CVMRMregset	target,    CVMRMregset	avoid,    CVMRMRegisterPreference pref,    CVMBool     targetSetIsStrict,    CVMBool     okToSpill,    int		nregs){    int		   regNo = -1;    CVMRMregset	   availList[6];    CVMRMregset	   prefMask;    CVMRMregset    unoccupiedRegisters;    CVMRMregset    unpinnedRegisters;    int            numTargetGroups = 1;    struct {	int numTargetSets;        CVMRMregset targetList[2];    } targetGroup[2];    int		   numAvailSets = 0;    int		   g;    /* Exclude registers that are not on RM_ANY_SET. */    target &= RM_ANY_SET;    if (target == ~CVMRM_ANY_SET && avoid == CVMRM_ANY_SET) {	/*	 * This is a byproduct of the caller trying to avoid the target	 * set of a subsequent resource allocation, and in this case the	 * target was CVMRM_ANY_SET. Clean this up by allowing any register	 * to be used.	 */	target = CVMRM_ANY_SET;	avoid = CVMRM_EMPTY_SET;    }    /*     * If there are no target registers that are not in the avoid set,     * the we are best off not trying to allocate from the target set     * and instead allocate from ~avoid. This way the register won't get     * trashed after being evaluated, and instead can be moved to the correct     * target register later.     *     * However, if avoid is RM_ANY_SET, then we just end up picking a random     * register, so we might as allocate from the target set in case the     * resource has a refcount > 1 and gets used at least once before spilled.     
*/    if ((target & ~avoid) == 0 && (avoid != RM_ANY_SET)) {	target = ~avoid;    }    switch (pref) {    case CVMRM_REGPREF_TARGET:        /* CVMRM_REGPREF_TARGET: Prefers that the reg be allocated from the           specified target set.  No additional restriction necessary. */	prefMask = RM_ANY_SET;	break;    case CVMRM_REGPREF_SCRATCH:        /* CVMRM_REGPREF_SCRATCH: The reg to be allocated is intended for use           as a scratch register, and its content is not expected to be           retained for a long time.  Hence, we would prefer to allocate it           from the unsafe set (could be trashed by C calls) so as not to           compete with those with pref CVMRM_REGPREF_PERSISTENT. */	prefMask = RM_UNSAFE_SET;	break;    case CVMRM_REGPREF_PERSISTENT:        /* CVMRM_REGPREF_PERSISTENT: The reg to be allocated will probably be           used to hold content that will be retained for a long time.  Hence,           we would prefer to allocate it from the safe set (won't be trashed           by C calls) so as not to minimize the need for spilling and           reloading. */	prefMask = RM_SAFE_SET;	break;    default:	CVMassert(CVM_FALSE);	prefMask = 0; /* get rid of compiler warning */	break;    };    /* There can be up to 4 possible target register lists organized as 2       groups.  The first group is for any reg allocation.  The second group       is only applicable if the register need not be strictly allocated from       the specified target set.  The lists are:        1. Group 0, List 0: The target set specified by the caller limited to                            the preferred set.        2. Group 0, List 1: The full target set specified by the caller.        3. Group 1, List 0: Any register limited to the preferred set.        4. Group 1, List 1: Any register.    */    targetGroup[0].numTargetSets = 1;    targetGroup[0].targetList[0] = target & prefMask; /* Group 0, List 0. */    if (pref != CVMRM_REGPREF_TARGET) {	targetGroup[0].targetList[1] = target; /* Group 0, List 1. */	targetGroup[0].numTargetSets = 2;    }    if (!targetSetIsStrict) {        numTargetGroups++;	targetGroup[1].numTargetSets = 1;	targetGroup[1].targetList[0] = RM_ANY_SET & prefMask; /* G1,L0. */	if (pref != CVMRM_REGPREF_TARGET) {	    targetGroup[1].targetList[1] = RM_ANY_SET; /* Group 1,List 1.*/	    targetGroup[1].numTargetSets = 2;	} else {	    CVMassert(RM_ANY_SET == targetGroup[1].targetList[0]);	}    }    /* The target lists about will be match against the following availability       sets.  The availability sets are:        Set 1: Unoccupied regs not in the avoid set, and in                the sandboxRegSet.        Set 2: All unoccupied regs in the sandboxRegSet.        Set 3: Unpinned regs not in the avoid set, and in                the sandboxRegSet.        Set 4: All unpinned regs in the sandboxRegSet.    
*/    unoccupiedRegisters = ~con->occupiedRegisters & con->sandboxRegSet;    unpinnedRegisters = ~con->pinnedRegisters & con->sandboxRegSet;    /* include unoccupied, unavoided, sandbox registers */    availList[numAvailSets++] = ~avoid & unoccupiedRegisters; /* Set 1.*/#ifndef PREFER_SPILL_OVER_AVOID    /* include unoccupied, avoided, sandbox registers */    {	if (unoccupiedRegisters != availList[numAvailSets - 1]) {	    availList[numAvailSets++] = unoccupiedRegisters; /* Set 2.*/	}    }#endif    if (okToSpill) {	CVMRMregset set;#if 0	/* include clean, occupied, unavoided, sandbox registers */	set = ~avoid & ~con->dirtyRegisters & con->sandboxRegSet;	if (set != availList[numAvailSets - 1]) {	    availList[numAvailSets++] = set;	}#endif	/* include dirty, occupied, unavoided, sandbox registers */	set = ~avoid & unpinnedRegisters;	if (set != availList[numAvailSets - 1]) {	    availList[numAvailSets++] = set; /* Set 3. */	}    }#ifdef PREFER_SPILL_OVER_AVOID    /* include unoccupied, avoided, sandbox registers */    availList[numAvailSets++] = unoccupiedRegisters; /* Set 2. */#endif    if (okToSpill) {	CVMRMregset set;#if 0	/* include clean, occupied, avoided, sandbox registers */	set = ~con->dirtyRegisters & con->sandboxRegSet;	if (set != availList[numAvailSets - 1]) {	    availList[numAvailSets++] = set;	}#endif	/* include dirty, occupied, avoided, sandbox registers */	set = unpinnedRegisters;	if (set != availList[numAvailSets - 1]) {	    availList[numAvailSets++] = set; /* Set 4. */	}    }    /* Now that the target and availability sets have all been set up, go find       a match: */    for (g = 0; g < numTargetGroups; ++g) {	int a;	CVMRMregset *targetList = targetGroup[g].targetList;	size_t numTargetSets = targetGroup[g].numTargetSets;	for (a = 0; a < numAvailSets; ++a) {	    int t;	    CVMRMregset avail = availList[a];	    for (t = 0; t < numTargetSets; ++t) {		CVMRMregset target = targetList[t];                /* Attempt to allocate from the intersection of the current                   target and availability sets: */		if (nregs == 1) {		    target &= avail;		} else {		    target &= avail & (avail>>1);		}		if (target != CVMRM_EMPTY_SET) {		    int regNo;		    CVMassert(avail != CVMRM_EMPTY_SET);		    regNo = findMaxRegInSet(target,				RM_MIN_INTERESTING_REG, RM_MAX_INTERESTING_REG,				(nregs>1) ? RM_DOUBLE_REG_ALIGN :					   RM_SINGLE_REG_ALIGN);		    if (regNo != -1) {                        if (okToSpill) {			    int i;			    /* special fussing for nregs == 2 */			    for (i=0; i < nregs; i++ ){				if (con->occupiedRegisters &				    (1U<<(regNo+i))){				    /* If sandbox is in effect, the register				       must come from the sandboxRegSet. 
*/                                    CVMassert((con->sandboxRegSet &                                              (1U<<(regNo+i))) != 0);				    spillRegister(con, regNo+i, CVM_TRUE);				}			    }			}			return regNo;		    }		}	    }	}    }    CVMassert(regNo == -1);    return regNo;}static intfindAvailableRegister(    CVMJITRMContext* con,    CVMRMregset target,    CVMRMregset avoid,    CVMRMRegisterPreference pref,    CVMBool     strict,    int		nregs){    int reg = findAvailableRegister0(con, target, avoid,				     pref, strict, CVM_TRUE, nregs);    CVMassert(!strict || reg == -1 || (target & (1U << reg)));    return reg;}static voidpinToRegister(    CVMJITRMContext* con,    CVMRMResource* rp,    int regNo){    if (rp->regno != -1 && rp->regno != regNo) {	/* handle shuffle */	CVMassert(con->reg2res[rp->regno] == rp);	/* dirty is OK, but pinned is not */	CVMassert((rp->flags & CVMRMpinned) == 0);	con->reg2res[rp->regno] = NULL;	if (rp->nregs > 1) {	    CVMassert(con->reg2res[rp->regno + 1] == rp);	    con->reg2res[rp->regno + 1] = NULL;	}    }    rp->flags |= CVMRMpinned;    rp->regno = regNo;    rp->rmask = ((1U << rp->nregs) - 1) << regNo;    /* pin this resource to this register */    con->pinnedRegisters |= rp->rmask;    con->occupiedRegisters |= rp->rmask;    {	CVMRMResource* prev = con->reg2res[regNo];	if (prev != NULL && prev != rp) {	    CVMassert((prev->flags & CVMRMpinned) == 0);	    CVMassert((prev->flags & CVMRMdirty) == 0);	    prev->regno = -1;	}    }    con->reg2res[rp->regno] = rp;    if (rp->nregs > 1 ){	CVMassert(rp->nregs == 2);	con->reg2res[rp->regno + 1] = rp;    }#ifdef CVM_DEBUG_ASSERTS    rp->key = con->key;#endif}/* * Re-compute a constant into a register based on its value */static voidreloadConstant32(    CVMJITCompilationContext* con,    CVMJITRMContext* rx,    CVMRMResource* rp){    CVMassert(CVMRMisConstant(rp));#ifdef CVM_TRACE_JIT    if (rp->name != NULL) {        CVMJITaddCodegenComment((con, rp->name));    } else {        CVMJITaddCodegenComment((con, "const %d", rp->constant));    }    CVMJITsetSymbolName((con, rp->name));#endif    /* Unmark the resource and ask codegen to load the constant     * to the register.  After the constant is loaded mark the resource     * as containing a constant.     */    rp->flags &= ~CVMRMConstant32;    (rx->constantLoader32)(con, rp->regno, rp->constant);    rp->flags |= CVMRMConstant32;}/* * Re-compute a local into a register */static voidreloadLocal(CVMJITCompilationContext* con, int opcode, CVMRMResource* rp){    CVMassert(CVMRMisLocal(rp));    CVMCPUemitFrameReference(con, opcode, rp->regno,                             CVMCPU_FRAME_LOCAL, rp->localNo);}/* * reloadRegister() knows how to re-compute a resource into a register. * Special-cased are constants, local variables, stack locations, * and frame references */static voidreloadRegister(    CVMJITCompilationContext* con,    CVMJITRMContext* rc,    CVMRMResource* rp){    /*     * a spilled resource has just been re-pinned     * and needs to be reloaded.     */    if (CVMRMisConstant(rp)) {        reloadConstant32(con, rc, rp);    } else if (CVMRMisLocal(rp)) {	reloadLocal(con, RM_LOAD_OPCODE(rc, rp->size), rp);    } else if (CVMRMisJavaStackTopValue(rp)) {	/*	 * this is a deferred pop operation.	 * Make sure that this is the stack top.	 
*/	CVMSMassertStackTop(con, rp->expr);	if (rp->size == 1){	    CVMSMpopSingle(con, rc, rp);	} else {	    CVMSMpopDouble(con, rc, rp);	}        CVMRMclearJavaStackTopValue(rp); /* not on top of the stack any more.*/        rp->flags |= CVMRMdirty;    } else {	CVMassert(rp->spillLoc >= 0);        CVMCPUemitFrameReference(con, RM_LOAD_OPCODE(rc, rp->size), rp->regno,				 CVMCPU_FRAME_TEMP, rp->spillLoc);    }}static voidshuffleRegisters(     CVMJITRMContext* con,    CVMRMResource* rp,    CVMRMregset	target,    CVMRMregset	avoid,    CVMRMRegisterPreference pref,    CVMBool strict){    /*      * The resource is currently in a register we want to avoid.     * We need to find an acceptable register, then move it.     */    int regNo;    int oldRegNo = rp->regno;    CVMRMregset oldrmask = rp->rmask;    CVMJITCompilationContext* cc = con->compilationContext;    CVMRMassertContextMatch(con, rp);    /*     * We already know that the resource is not in the target set, so the     * only way findAvailableRegister() will cause the resource to spill is     * if the resource is a 64-bit one and its 2nd register overlaps the      * target set (we know the first one is not in it). We choose not to pin     * the resource and risk the wasted spill in this case. It turns out that     * this is extremely rare and keeping it pinned would result in      * findAvailableRegister() failing.     */    regNo = findAvailableRegister(con, target, avoid, pref, strict, rp->nregs);    CVMassert(regNo != -1);    /*     * If you expect excess spilling because the resource is not kept pinned,     * then enable the following code. I could only get it to trigger the     * printf once when running the entire tck.
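Editor's note: the CVMRMregset values above (target, avoid, occupiedRegisters, pinnedRegisters) are plain bitmasks with one bit per register, and the heart of findAvailableRegister0() is intersecting a target mask with an availability mask, using avail & (avail >> 1) to demand an adjacent free pair for 64-bit values, then scanning for an acceptable register number. The standalone sketch below is not part of jitregman.c; names such as regset_t, pick_register, and allocate are hypothetical, pick_register merely stands in for findMaxRegInSet(), and the even-register alignment assumed for pairs may differ from a real port's RM_DOUBLE_REG_ALIGN.

#include <stdio.h>

typedef unsigned int regset_t;  /* one bit per register, like CVMRMregset */

/* Hypothetical stand-in for findMaxRegInSet(): return the highest register
 * number in 'candidates' within [minReg, maxReg] that satisfies the
 * alignment (1 = any register, 2 = even-numbered base register), or -1. */
static int pick_register(regset_t candidates, int minReg, int maxReg, int align)
{
    int r;
    for (r = maxReg; r >= minReg; r--) {
        if ((candidates & (1U << r)) != 0 && (r % align) == 0) {
            return r;
        }
    }
    return -1;
}

/* Mirror of the core intersection step in findAvailableRegister0(): pick a
 * register (or an aligned pair when nregs == 2) that is both targeted and free. */
static int allocate(regset_t target, regset_t avail, int nregs)
{
    regset_t candidates;
    if (nregs == 1) {
        candidates = target & avail;
    } else {
        /* avail & (avail >> 1) keeps only bits r where both r and r+1 are
         * free, so a 64-bit value gets an adjacent register pair. */
        candidates = target & avail & (avail >> 1);
    }
    return pick_register(candidates, 0, 14, (nregs > 1) ? 2 : 1);
}

int main(void)
{
    regset_t occupied = (1U << 0) | (1U << 5) | (1U << 6); /* r0, r5, r6 busy */
    regset_t avail = ~occupied;

    printf("single reg: r%d\n", allocate(0x00ffU, avail, 1)); /* prints r7 */
    printf("reg pair:   r%d\n", allocate(0x00ffU, avail, 2)); /* prints r2 */
    return 0;
}

With r0, r5, and r6 marked busy and a target set of r0-r7, the single-register request picks r7 (the highest free target register) and the pair request picks r2, since r2:r3 is the highest even-aligned adjacent free pair inside the target set.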
