⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 excalib.s

📁 VxWorks BSP框架源代码包含头文件和驱动
💻 S
📖 第 1 页 / 共 4 页
字号:
/*
 * NOTE(review): this chunk begins mid-file.  The "#ifdef _PPC_MSR_IR"
 * that pairs with the first #define/#endif below, and FUNC_BEGIN(excEnt),
 * lie before the visible text.  Each block below maps an optional
 * per-CPU _PPC_MSR_x MSR bit definition onto a local _MSR_x macro,
 * defaulting to 0 when the CPU variant does not implement that bit, so
 * the mask expressions further down can OR them unconditionally.
 */
#define	_MSR_IR	0
#endif  /* _PPC_MSR_IR */

#ifdef	_PPC_MSR_DR
#define	_MSR_DR	_PPC_MSR_DR
#else  /* _PPC_MSR_DR */
#define	_MSR_DR	0
#endif  /* _PPC_MSR_DR */

#ifdef	_PPC_MSR_IS
#define	_MSR_IS	_PPC_MSR_IS
#else  /* _PPC_MSR_IS */
#define	_MSR_IS	0
#endif  /* _PPC_MSR_IS */

#ifdef	_PPC_MSR_DS
#define	_MSR_DS	_PPC_MSR_DS
#else  /* _PPC_MSR_DS */
#define	_MSR_DS	0
#endif  /* _PPC_MSR_DS */

#ifdef	_PPC_MSR_CE
#define	_MSR_CE	_PPC_MSR_CE
#else	/* _PPC_MSR_CE */
#define	_MSR_CE	0
#endif	/* _PPC_MSR_CE */

#ifdef	_PPC_MSR_MCE
#define	_MSR_MCE	_PPC_MSR_MCE
#else	/* _PPC_MSR_MCE */
#define	_MSR_MCE	0
#endif	/* _PPC_MSR_MCE */

	/*
	 * Mask p0 (pre-exception MSR value) down to the translation/
	 * interrupt/FP bits that must be turned back on while the
	 * exception is being handled.  On "classic" PPC the mask fits in
	 * andi.'s 16-bit immediate; on 4xx/85xx (and when CE is present)
	 * it must be built in a register via lis/ori, using SPRG2 as a
	 * scratch save slot for p1.
	 */
#if	((CPU != PPC403) && (CPU != PPC405) && (CPU != PPC405F) && \
	 (CPU != PPC440) && (CPU != PPC85XX))
# if	(CPU != PPC509)
	andi.	p0, p0, _MSR_RI | _MSR_FP | _MSR_IR | _MSR_DR | _MSR_IS | _MSR_DS | _PPC_MSR_EE
# else	/* CPU == PPC509 */
	andi.	p0, p0, _PPC_MSR_RI | _PPC_MSR_EE | _PPC_MSR_FP
# endif	/* CPU == PPC509 */
#else	/* CPU == PPC4xx, PPC85XX */
	mtspr	SPRG2, p1			/* SPRG2 = MSRval current */
	lis	p1,  HI(_MSR_CE | _MSR_FP | _MSR_IR | _MSR_DR | _MSR_IS | _MSR_DS | _PPC_MSR_EE)
	ori	p1, p1, LO(_MSR_CE | _MSR_FP | _MSR_IR | _MSR_DR | _MSR_IS | _MSR_DS | _PPC_MSR_EE)
	and.	p0, p1, p0			/* extract runtime values */
	mfspr	p1, SPRG2			/* p1 = MSRval current */
#endif	/* CPU != PPC4xx && PPC85XX */
						/* get value of IR,DR,IS,DS,EE,RI */
						/* & FP bits before exception */
	or	p1, p1, p0			/* restore the previous value */
						/* of IR,DR,IS,DS,EE,RI & FP bits */
	mtmsr	p1				/* ENABLE INTERRUPT */
	isync					/* synchronize */
	mfspr   p0, LR                          /* p0 = exception number */
						/* may be wrong if relocated */

/*
 * The LR value is offset from the vector address by the size of the
 * bla and other instructions preceding it in excConnectCode (or in
 * excExtConnectCode if excExtendedVectors is in effect).  For the
 * "normal" exceptions handled here, the difference varies depending
 * on whether the processor also implements "critical" exceptions.
 *
 * In either case, the offset amounts to 4 * (ENT_OFF + 1) for "short"
 * vectors or 4 * EXT_ISR_OFF for extended vectors; however these symbols
 * are defined in excArchLib.c and the definitions are not accessible here.
 */
	lis     p1, HIADJ(excExtendedVectors)
	lwz     p1, LO(excExtendedVectors)(p1)	/* get excExtendedVectors */
	cmpwi	p1, 0				/* if 0, short vectors */
	beq	shortVec
	li	p1, 20			/* 4 * (EXT_ISR_OFF - (ENT_OFF + 1)) */
shortVec:
	/* add the short-vector part of the offset in both cases */
#ifdef	_EXC_OFF_CRTL
	addi	p1, p1, 28			/* 4 * (ENT_OFF + 1) */
#else	/* _EXC_OFF_CRTL */
	addi	p1, p1, 12			/* 4 * (ENT_OFF + 1) */
#endif	/* _EXC_OFF_CRTL */
	sub	p0, p0, p1			/* p0 = vector address */
	stw	p0, _PPC_ESF_VEC_OFF(sp)	/* store to ESF */

	/* save user-level special purpose registers to the ESF */
	mfspr	p0, CTR				/* load CTR to P0 */
	stw	p0, _PPC_ESF_CTR(sp)		/* save CTR */
	mfspr	p1, XER				/* load XER to P1 */
	stw	p1, _PPC_ESF_XER(sp)		/* save XER */
#if	(CPU == PPC601)
	mfspr	p1, MQ				/* load MQ to P1 */
	stw	p1, _PPC_ESF_MQ(sp)		/* save MQ */
#endif	/* (CPU == PPC601) */
#if	(CPU == PPC85XX)
	mfspr	p1, SPEFSCR			/* load SPEFSCR to P1 */
	stw	p1, _PPC_ESF_SPEFSCR(sp)	/* save SPEFSCR */
#endif	/* (CPU == PPC85XX) */

	/*
	 * DEAR/DAR/ESR/DSISR not saved earlier, for 5.5 compatibility;
	 * see SPR 90228.
	 */
#if	((CPU == PPC403) || (CPU == PPC405) || (CPU == PPC405F) || \
	 (CPU == PPC440) || (CPU == PPC85XX))
	mfspr	p0, DEAR			/* load DEAR to P0 */
	stw	p0, _PPC_ESF_DEAR(sp)		/* save DEAR */
# if	(CPU == PPC85XX)
	mfspr   p0, ESR				/* load ESR to P0 */
	stw     p0, _PPC_ESF_ESR(sp)		/* save ESR */
# endif  /* CPU == PPC85XX */
#else	/* CPU == PPC4xx, PPC85XX */
	mfspr	p1, DAR
	stw	p1, _PPC_ESF_DAR(sp)		/* save DAR */
	mfspr	p1, DSISR
	stw	p1, _PPC_ESF_DSISR(sp)		/* save DSISR */
#endif	/* CPU == PPC4xx, PPC85XX */

	stw	r0, _PPC_ESF_R0(sp)		/* save general register 0 */
	addi	r0, r1, _PPC_ESF_STK_SIZE	/* r0 = SP before the ESF push */
	stw	r0, _PPC_ESF_R1(sp)		/* save exception sp */
	stw	r2, _PPC_ESF_R2(sp)		/* save general register 2 */
#if	TRUE					/* optimization to test */
	/* save the volatile register values on the ESF */
	stw	p2, _PPC_ESF_P2(sp)		/* save general register 5 */
	stw	p3, _PPC_ESF_P3(sp)		/* save general register 6 */
	stw	p4, _PPC_ESF_P4(sp)		/* save general register 7 */
	stw	p5, _PPC_ESF_P5(sp)		/* save general register 8 */
	stw	p6, _PPC_ESF_P6(sp)		/* save general register 9 */
	stw	p7, _PPC_ESF_P7(sp)		/* save general register 10 */
	stw	r11, _PPC_ESF_R11(sp)		/* save general register 11 */
	stw	r12, _PPC_ESF_R12(sp)		/* save general register 12 */
	stw	r13, _PPC_ESF_R13(sp)		/* save general register 13 */
	/* save the non volatile register values on the ESF */
	stw	t0, _PPC_ESF_T0(sp)		/* save general register 14 */
	stw	t1, _PPC_ESF_T1(sp)		/* save general register 15 */
	stw	t2, _PPC_ESF_T2(sp)		/* save general register 16 */
	stw	t3, _PPC_ESF_T3(sp)		/* save general register 17 */
	stw	t4, _PPC_ESF_T4(sp)		/* save general register 18 */
	stw	t5, _PPC_ESF_T5(sp)		/* save general register 19 */
	stw	t6, _PPC_ESF_T6(sp)		/* save general register 20 */
	stw	t7, _PPC_ESF_T7(sp)		/* save general register 21 */
	stw	t8, _PPC_ESF_T8(sp)		/* save general register 22 */
	stw	t9, _PPC_ESF_T9(sp)		/* save general register 23 */
	stw	t10, _PPC_ESF_T10(sp)		/* save general register 24 */
	stw	t11, _PPC_ESF_T11(sp)		/* save general register 25 */
	stw	t12, _PPC_ESF_T12(sp)		/* save general register 26 */
	stw	t13, _PPC_ESF_T13(sp)		/* save general register 27 */
	stw	t14, _PPC_ESF_T14(sp)		/* save general register 28 */
	stw	t15, _PPC_ESF_T15(sp)		/* save general register 29 */
	stw	t16, _PPC_ESF_T16(sp)		/* save general register 30 */
	stw	t17, _PPC_ESF_T17(sp)		/* save general register 31 */
#else
	stmw	p2, _PPC_ESF_P2(sp)		/* save general register 5 */
						/* through 31 */
#endif	/* TRUE */
	blr					/* return to caller */
FUNC_END(excEnt)

/*******************************************************************************
*
* excExit - default context restore routine upon exception exit
*
* Restores the full task context saved in the ESF addressed by sp (GPRs,
* CTR/XER/LR/CR and per-CPU SPRs), disables interrupts, reloads SRR0/SRR1
* with the saved PC/MSR, restores the task stack pointer and returns to
* the interrupted context with rfi.  Does not return to its caller.
*
* NOMANUAL
* void excExit()
*/
FUNC_BEGIN(excExit)
	/* restore dedicated and scratch registers */
	lwz	r0, _PPC_ESF_R0(sp)		/* restore general register 0 */
	lwz	r2, _PPC_ESF_R2(sp)		/* restore general register 2 */
#if	TRUE					/* optimization to test */
	/* restore volatile registers */
	lwz	p1, _PPC_ESF_P1(sp)		/* restore general register 4 */
	lwz	p2, _PPC_ESF_P2(sp)		/* restore general register 5 */
	lwz	p3, _PPC_ESF_P3(sp)		/* restore general register 6 */
	lwz	p4, _PPC_ESF_P4(sp)		/* restore general register 7 */
	lwz	p5, _PPC_ESF_P5(sp)		/* restore general register 8 */
	lwz	p6, _PPC_ESF_P6(sp)		/* restore general register 9 */
	lwz	p7, _PPC_ESF_P7(sp)		/* restore general reg 10 */
	lwz	r11, _PPC_ESF_R11(sp)		/* restore general reg 11 */
	lwz	r12, _PPC_ESF_R12(sp)		/* restore general reg 12 */
	lwz	r13, _PPC_ESF_R13(sp)		/* restore general reg 13 */
	/* restore non-volatile registers */
	/*
	 * XXX TPR the non-volatile should not be restored because they are
	 * not destroyed. To test or verify
	 */
	lwz	t0, _PPC_ESF_T0(sp)		/* restore general reg 14 */
	lwz	t1, _PPC_ESF_T1(sp)		/* restore general reg 15 */
	lwz	t2, _PPC_ESF_T2(sp)		/* restore general reg 16 */
	lwz	t3, _PPC_ESF_T3(sp)		/* restore general reg 17 */
	lwz	t4, _PPC_ESF_T4(sp)		/* restore general reg 18 */
	lwz	t5, _PPC_ESF_T5(sp)		/* restore general reg 19 */
	lwz	t6, _PPC_ESF_T6(sp)		/* restore general reg 20 */
	lwz	t7, _PPC_ESF_T7(sp)		/* restore general reg 21 */
	lwz	t8, _PPC_ESF_T8(sp)		/* restore general reg 22 */
	lwz	t9, _PPC_ESF_T9(sp)		/* restore general reg 23 */
	lwz	t10, _PPC_ESF_T10(sp)		/* restore general reg 24 */
	lwz	t11, _PPC_ESF_T11(sp)		/* restore general reg 25 */
	lwz	t12, _PPC_ESF_T12(sp)		/* restore general reg 26 */
	lwz	t13, _PPC_ESF_T13(sp)		/* restore general reg 27 */
	lwz	t14, _PPC_ESF_T14(sp)		/* restore general reg 28 */
	lwz	t15, _PPC_ESF_T15(sp)		/* restore general reg 29 */
	lwz	t16, _PPC_ESF_T16(sp)		/* restore general reg 30 */
	lwz	t17, _PPC_ESF_T17(sp)		/* restore general reg 31 */
#else
	lmw	p1, _PPC_ESF_P1(sp)		/* restore general register 4 */
						/* through 31 */
#endif	/* TRUE	*/

	/* restore user level special purpose registers */
	lwz	p0, _PPC_ESF_CTR(sp)		/* load saved CTR  to P0*/
	mtspr	CTR, p0				/* restore CTR */
	lwz	p0, _PPC_ESF_XER(sp)		/* load saved XER to P0 */
	mtspr	XER, p0				/* restore XER */
	lwz	p0, _PPC_ESF_LR(sp)		/* load saved LR to P0 */
	mtspr	LR, p0				/* restore LR */
	lwz	p0, _PPC_ESF_CR(sp)		/* load the saved CR to P0 */
	mtcrf	255,p0				/* restore CR (all 8 fields) */
#if	(CPU==PPC601)
	lwz	p0, _PPC_ESF_MQ(sp)		/* load saved MQ to P0 */
	mtspr	MQ, p0				/* restore MQ */
#endif	/* (CPU==PPC601) */
#if	(CPU==PPC85XX)
	lwz	p0, _PPC_ESF_SPEFSCR(sp)	/* load saved SPEFSCR to P0 */
	mtspr	SPEFSCR, p0			/* restore SPEFSCR */
#endif	/* (CPU==PPC85XX) */

	/* XXX TPR this code can be optimized */

	/*
	 * Interrupts must be off before touching SRR0/SRR1: an interrupt
	 * taken now would overwrite them and corrupt the return context.
	 */
	mfmsr	p0				/* read msr */
#ifdef	_PPC_MSR_RI
	RI_MASK(p0, p0 )			/* mask RI bit */
#endif	/* _PPC_MSR_RI */
	INT_MASK(p0,p0)				/* clear EE bit in msr */
	mtmsr	p0				/* DISABLE INTERRUPT */
	isync					/* synchronize */
#ifdef 	_WRS_TLB_MISS_CLASS_SW
	/*
	 * Turn off MMU to keep SW TLB Miss handler from corrupting
	 * SRR0, SRR1.
	 */
	lwz	p0, _PPC_ESF_PC(sp)		/* load PC to P0 and save */
	mtspr	SPRG0,p0			/* it temporarily in SPRG0 */
	lwz	p0, _PPC_ESF_MSR(sp)		/* load MSR to P0 and save */
	mtspr	SPRG3, p0			/* it temporarily in SPRG3 */
	lwz	p0, _PPC_ESF_P0(sp)		/* restore P0 and save */
	mtspr	SPRG2,p0			/* it temporarily in SPRG2 */
	lwz	sp, _PPC_ESF_SP(sp)		/* restore the SP(R1) */
						/* turn off the MMU before */
						/* to restore the SRR0/SRR1 */
	mfmsr	p0				/* read msr */
	rlwinm	p0,p0,0,28,25			/* disable Instr/Data trans */
	mtmsr	p0				/* set msr */
	isync					/* synchronization */
	mfspr	p0, SPRG0			/* load SPRG0 to p0 */
	mtspr	SRR0, p0			/* restore SRR0 (PC) */
	mfspr	p0, SPRG3			/* load SPRG3 to p0 */
	mtspr	SRR1, p0			/* restore SRR1 (MSR) */
	mfspr	p0, SPRG2			/* restore P0 from SPRG2 */
#else	/* !_WRS_TLB_MISS_CLASS_SW */
	/*
	 * both MMU-less and MMU with miss handler in HW use this code
	 */
	lwz	p0, _PPC_ESF_PC(sp)		/* load the saved PC to P0 */
	mtspr	SRR0, p0			/* and restore SRR0 (PC) */
	lwz	p0, _PPC_ESF_MSR(sp)		/* load the saved MSR to P0 */
	mtspr	SRR1, p0			/* and restore SRR1 (MSR) */
	lwz	p0, _PPC_ESF_P0(sp)		/* restore p0 */
	lwz	sp, _PPC_ESF_SP(sp)		/* restore the stack pointer */
#endif	/* _WRS_TLB_MISS_CLASS_SW */
	rfi					/* return to context of the */
						/* task that got exception */
FUNC_END(excExit)

#ifdef	_PPC_MSR_CE
/*******************************************************************************
*
* excCrtEnt - default context saving routine upon critical exception entrance
*
* NOTE: The stack pointer is already set to the exception stack frame pointer.
*       The exception vector on the stack is saved as  vector offset +
*       _EXC_CODE_SIZE.
*
* NOMANUAL
* void excCrtEnt()
*/
FUNC_BEGIN(excCrtEnt)
	/* At the entry of this function, the following is done */
	/* mtspr        SPRG2, p0       /@ save P0 to SPRG2 */
	/* mfspr        p0, LR          /@ load LR to P0 */
	/* bla          excCrtEnt       /@ call excCrtEnt */

	/*
	 * reserve a room equal to the size of the ESF. This memory space is
	 * taken from the stack of the task which has produced the exception.
	 * This memory space is used to save the processor critical register
	 * values.
	 */
	stwu    sp, - _PPC_ESF_STK_SIZE(sp)     /* update SP */
	stw     p0, _PPC_ESF_LR(sp)             /* save LR */
	mfspr   p0, SPRG2                       /* load saved P0 */
	/* NOTE(review): excCrtEnt continues past the end of this chunk. */

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -