📄 excalib.s
字号:
/*
 * NOTE(review): this chunk begins in the middle of excMchkEnt (machine-check
 * exception entry); the FUNC_BEGIN and the earlier ESF stores (SP, LR, P0, PC)
 * are above this view.  At this point p0 holds the interrupted context's MSR
 * value and sp points at the exception stack frame (ESF).
 */
        stw     p0, _PPC_ESF_MSR(sp)            /* save MSR in ESF */
        stw     p1, _PPC_ESF_P1(sp)             /* save general register P1 */
        mfcr    p1                              /* load CR to P1 */
        stw     p1, _PPC_ESF_CR(sp)             /* save CR */

        /*
         * Before we reenable the MMU, we need to ensure that the values
         * we pushed on the stack above are flushed out of cache.
         */

        dcbf    0, sp                           /* push SP value to memory */
        li      p1, _PPC_ESF_LR
        dcbf    p1, sp                          /* push LR value to memory */
        li      p1, _PPC_ESF_P0
        dcbf    p1, sp                          /* push P0 value to memory */
        li      p1, _PPC_ESF_PC
        dcbf    p1, sp                          /* push PC value to memory */
        li      p1, _PPC_ESF_MSR
        dcbf    p1, sp                          /* push MSR value to memory */
        li      p1, _PPC_ESF_P1
        dcbf    p1, sp                          /* push P1 value to memory */
        li      p1, _PPC_ESF_CR
        dcbf    p1, sp                          /* push CR value to memory */
        sync

        /*
         * Now turn the data and/or instruction MMU back on if they were
         * previously turned on.  The critical registers are saved, so
         * interrupts and machine checks can be re-enabled: if an interrupt
         * or exception is detected, the previous state can be reconstructed.
         */

        mfmsr   p1                              /* p1 = MSRval current */
                                                /* p0 should have MSRval app */
        mtspr   SPRG4_W, p1                     /* SPRG4 = MSRval current */
        lis     p1, HI( _MSR_MCE | _MSR_CE | _MSR_IS | _MSR_DS | _PPC_MSR_EE )
        ori     p1, p1, LO( _MSR_MCE | _MSR_CE | _MSR_IS | _MSR_DS | _PPC_MSR_EE )
        and.    p0, p1, p0                      /* extract app IS,DS,ME,CE,EE */
        mfspr   p1, SPRG4_W                     /* p1 = MSRval before */
        or      p1, p1, p0                      /* p1 = MSRval current with */
                                                /* app IS,DS,ME,CE,EE */
        mtmsr   p1                              /* ENABLE INTERRUPT & MMU */
        isync                                   /* synchronize */
        mfspr   p0, LR                          /* p0 = exception number */
                                                /* may be wrong if relocated */

        /*
         * The LR value is offset from the vector address by the size of the
         * bla and other instructions preceding it in excCrtConnectCode (or
         * in excExtCrtConnectCode if excExtendedVectors is in effect).
         *
         * The offset amounts to 4 * (ENT_CRT_OFF + 1) or 4 * EXT_ISR_CRT_OFF
         * respectively, however these symbols are defined in excArchLib.c
         * and the definitions are not accessible here.
         */

        lis     p1, HIADJ(excExtendedVectors)
        lwz     p1, LO(excExtendedVectors)(p1)  /* get excExtendedVectors */
        cmpwi   p1, 0                           /* if 0, short vectors */
        beq     mchkShortVec
        li      p1, 20                  /* 4 * (EXT_ISR_CRT_OFF - (ENT_CRT_OFF + 1)) */
mchkShortVec:
        addi    p1, p1, 12              /* 4 * (ENT_CRT_OFF + 1) */
        sub     p0, p0, p1              /* p0 = vector offset */
        stw     p0, _PPC_ESF_VEC_OFF(sp)        /* store to ESF */

        mfspr   p0, CTR                 /* load CTR to P0 */
        stw     p0, _PPC_ESF_CTR(sp)    /* save CTR */
        mfspr   p1, XER                 /* load XER to P1 */
        stw     p1, _PPC_ESF_XER(sp)    /* save XER */

        /*
         * SPEFP code in handler not supported. No need to save for now.
         */
#if FALSE
#if (CPU == PPC85XX)
        mfspr   p1, SPEFSCR             /* load SPEFSCR to P1 */
        stw     p1, _PPC_ESF_SPEFSCR(sp) /* save SPEFSCR */
#endif /* (CPU == PPC85XX) */
#endif /* FALSE */

#ifdef _PPC_MSR_MCE
        /* machine-check syndrome registers, stored in the DEAR/ESR ESF slots */
        mfspr   p0, MCAR                /* load MCAR to P0 */
        stw     p0, _PPC_ESF_DEAR(sp)   /* save MCAR */
        mfspr   p0, MCSR                /* load MCSR to P0 */
        stw     p0, _PPC_ESF_ESR(sp)    /* save MCSR */
#endif /* _PPC_MSR_MCE */

        stw     r0, _PPC_ESF_R0(sp)     /* save general register 0 */
        addi    r0, r1, _PPC_ESF_STK_SIZE /* r0 = SP value before the ESF was pushed */
        stw     r0, _PPC_ESF_R1(sp)     /* save exception sp */
        stw     r2, _PPC_ESF_R2(sp)     /* save general register 2 */

# if TRUE       /* optimization to test */
        /* save the volatile register values on the ESF */

        stw     p2, _PPC_ESF_P2(sp)     /* save general register 5 */
        stw     p3, _PPC_ESF_P3(sp)     /* save general register 6 */
        stw     p4, _PPC_ESF_P4(sp)     /* save general register 7 */
        stw     p5, _PPC_ESF_P5(sp)     /* save general register 8 */
        stw     p6, _PPC_ESF_P6(sp)     /* save general register 9 */
        stw     p7, _PPC_ESF_P7(sp)     /* save general register 10 */
        stw     r11, _PPC_ESF_R11(sp)   /* save general register 11 */
        stw     r12, _PPC_ESF_R12(sp)   /* save general register 12 */
        stw     r13, _PPC_ESF_R13(sp)   /* save general register 13 */

        /* save the non volatile register values on the ESF */

        stw     t0, _PPC_ESF_T0(sp)     /* save general register 14 */
        stw     t1, _PPC_ESF_T1(sp)     /* save general register 15 */
        stw     t2, _PPC_ESF_T2(sp)     /* save general register 16 */
        stw     t3, _PPC_ESF_T3(sp)     /* save general register 17 */
        stw     t4, _PPC_ESF_T4(sp)     /* save general register 18 */
        stw     t5, _PPC_ESF_T5(sp)     /* save general register 19 */
        stw     t6, _PPC_ESF_T6(sp)     /* save general register 20 */
        stw     t7, _PPC_ESF_T7(sp)     /* save general register 21 */
        stw     t8, _PPC_ESF_T8(sp)     /* save general register 22 */
        stw     t9, _PPC_ESF_T9(sp)     /* save general register 23 */
        stw     t10, _PPC_ESF_T10(sp)   /* save general register 24 */
        stw     t11, _PPC_ESF_T11(sp)   /* save general register 25 */
        stw     t12, _PPC_ESF_T12(sp)   /* save general register 26 */
        stw     t13, _PPC_ESF_T13(sp)   /* save general register 27 */
        stw     t14, _PPC_ESF_T14(sp)   /* save general register 28 */
        stw     t15, _PPC_ESF_T15(sp)   /* save general register 29 */
        stw     t16, _PPC_ESF_T16(sp)   /* save general register 30 */
        stw     t17, _PPC_ESF_T17(sp)   /* save general register 31 */
# else /* TRUE */
        stmw    p2, _PPC_ESF_P2(sp)     /* save general register 5 */
                                        /* through 31 */
# endif /* TRUE */

        blr                             /* return to caller */
FUNC_END(excMchkEnt)

/*******************************************************************************
*
* excMchkExit - default context restore routine on machine check exception exit
*
* NOMANUAL
* void excMchkExit()
*/

FUNC_BEGIN(excMchkExit)
        /* restore dedicated and scratch registers */

        lwz     r0, _PPC_ESF_R0(sp)     /* restore general register 0 */
        lwz     r2, _PPC_ESF_R2(sp)     /* restore general register 2 */

# if TRUE       /* optimization to test */
        /* restore volatile registers */

        lwz     p1, _PPC_ESF_P1(sp)     /* restore general register 4 */
        lwz     p2, _PPC_ESF_P2(sp)     /* restore general register 5 */
        lwz     p3, _PPC_ESF_P3(sp)     /* restore general register 6 */
        lwz     p4, _PPC_ESF_P4(sp)     /* restore general register 7 */
        lwz     p5, _PPC_ESF_P5(sp)     /* restore general register 8 */
        lwz     p6, _PPC_ESF_P6(sp)     /* restore general register 9 */
        lwz     p7, _PPC_ESF_P7(sp)     /* restore general reg 10 */
        lwz     r11, _PPC_ESF_R11(sp)   /* restore general reg 11 */
        lwz     r12, _PPC_ESF_R12(sp)   /* restore general reg 12 */
        lwz     r13, _PPC_ESF_R13(sp)   /* restore general reg 13 */

        /* restore non-volatile registers */
        /*
         * XXX TPR the non-volatile should not be restored because they are
         * not destroyed. To test or verify
         */

        lwz     t0, _PPC_ESF_T0(sp)     /* restore general reg 14 */
        lwz     t1, _PPC_ESF_T1(sp)     /* restore general reg 15 */
        lwz     t2, _PPC_ESF_T2(sp)     /* restore general reg 16 */
        lwz     t3, _PPC_ESF_T3(sp)     /* restore general reg 17 */
        lwz     t4, _PPC_ESF_T4(sp)     /* restore general reg 18 */
        lwz     t5, _PPC_ESF_T5(sp)     /* restore general reg 19 */
        lwz     t6, _PPC_ESF_T6(sp)     /* restore general reg 20 */
        lwz     t7, _PPC_ESF_T7(sp)     /* restore general reg 21 */
        lwz     t8, _PPC_ESF_T8(sp)     /* restore general reg 22 */
        lwz     t9, _PPC_ESF_T9(sp)     /* restore general reg 23 */
        lwz     t10, _PPC_ESF_T10(sp)   /* restore general reg 24 */
        lwz     t11, _PPC_ESF_T11(sp)   /* restore general reg 25 */
        lwz     t12, _PPC_ESF_T12(sp)   /* restore general reg 26 */
        lwz     t13, _PPC_ESF_T13(sp)   /* restore general reg 27 */
        lwz     t14, _PPC_ESF_T14(sp)   /* restore general reg 28 */
        lwz     t15, _PPC_ESF_T15(sp)   /* restore general reg 29 */
        lwz     t16, _PPC_ESF_T16(sp)   /* restore general reg 30 */
        lwz     t17, _PPC_ESF_T17(sp)   /* restore general reg 31 */
# else /* TRUE */
        lmw     p1, _PPC_ESF_P1(sp)     /* restore general register 4 */
                                        /* through 31 */
# endif /* TRUE */

        /* restore user level special purpose registers */

        lwz     p0, _PPC_ESF_CTR(sp)    /* load saved CTR to P0 */
        mtspr   CTR, p0                 /* restore CTR */
        lwz     p0, _PPC_ESF_XER(sp)    /* load saved XER to P0 */
        mtspr   XER, p0                 /* restore XER */
        lwz     p0, _PPC_ESF_LR(sp)     /* load saved LR to P0 */
        mtspr   LR, p0                  /* restore LR */
        lwz     p0, _PPC_ESF_CR(sp)     /* load the saved CR to P0 */
        mtcrf   255,p0                  /* restore CR (all eight fields) */

        /*
         * SPEFP code in handler not supported. No need to save for now.
         */
#if FALSE
#if (CPU==PPC85XX)
        lwz     p0, _PPC_ESF_SPEFSCR(sp) /* load saved SPEFSCR to P0 */
        mtspr   SPEFSCR, p0             /* restore SPEFSCR */
#endif /* (CPU==PPC85XX) */
#endif /* FALSE */

        /* XXX TPR this code can be optimized */

        mfmsr   p0                      /* read msr */
# ifdef _PPC_MSR_RI
        RI_MASK(p0, p0 )                /* mask RI bit */
# endif /* _PPC_MSR_RI */
        INT_MASK(p0,p0)                 /* clear EE/CE bit in msr */
        mtmsr   p0                      /* DISABLE INTERRUPT */
        isync                           /* synchronize */

        /*
         * Load the saved PC/MSR into the machine-check save/restore
         * registers so rfmci returns to the interrupted context.
         */
        lwz     p0, _PPC_ESF_PC(sp)     /* load the saved PC to P0 */
        mtspr   MCSRR0, p0              /* and restore MCSRR0 */
        lwz     p0, _PPC_ESF_MSR(sp)    /* load the saved MSR to P0 */
        mtspr   MCSRR1, p0              /* and restore MCSRR1 */
        lwz     p0, _PPC_ESF_P0(sp)     /* restore p0 */
        lwz     sp, _PPC_ESF_SP(sp)     /* restore the stack pointer */
        rfmci                           /* return to context of the */
                                        /* task that got exception */
FUNC_END(excMchkExit)

#endif /* _PPC_MSR_MCE */
#endif /* _PPC_MSR_CE */

/*******************************************************************************
*
* excEPSet - set exception vector prefix
*
* NOMANUAL
*/

FUNC_BEGIN(excEPSet)
#ifdef _PPC_MSR_IP
        mfmsr   p7                      /* load msr to p7 */
        li      p6, _PPC_MSR_IP         /* load IP mask bit to p6 */
        cmpwi   p0, 0                   /* is base address zero */
        beq     excEPClear              /* goto clear IP bit */
        or      p7, p6, p7              /* set IP bit */
        b       excMsrSet               /* go to set msr */
excEPClear:
        rlwinm  p7, p7, 0, 26, 24       /* clear _PPC_MSR_IP bit */
excMsrSet:
        mtmsr   p7                      /* set msr */
#endif /* _PPC_MSR_IP */
        blr                             /* return to the caller */
FUNC_END(excEPSet)

#ifdef IVOR0
/*******************************************************************************
*
* excIvorInit - set IVOR's as defined in excPpcLib.h
*
* NOMANUAL
*
* void excIvorInit(void)
*
*/

FUNC_EXPORT(excIvorInit)
FUNC_BEGIN(excIvorInit)
        li      p0,IVOR0_VAL
        mtspr   IVOR0,p0
        li      p0,IVOR1_VAL
        mtspr   IVOR1,p0
        li      p0,IVOR2_VAL
        mtspr   IVOR2,p0
        li      p0,IVOR3_VAL
        mtspr   IVOR3,p0
        li      p0,IVOR4_VAL
        mtspr   IVOR4,p0
        li      p0,IVOR5_VAL
        mtspr   IVOR5,p0
        li      p0,IVOR6_VAL
        mtspr   IVOR6,p0
        li      p0,IVOR7_VAL
        mtspr   IVOR7,p0
        li      p0,IVOR8_VAL
        mtspr   IVOR8,p0
        li      p0,IVOR9_VAL
        mtspr   IVOR9,p0
        li      p0,IVOR10_VAL
        mtspr   IVOR10,p0
        li      p0,IVOR11_VAL
        mtspr   IVOR11,p0
        li      p0,IVOR12_VAL
        mtspr   IVOR12,p0
        li      p0,IVOR13_VAL
        mtspr   IVOR13,p0
        li      p0,IVOR14_VAL
        mtspr   IVOR14,p0
        li      p0,IVOR15_VAL
        mtspr   IVOR15,p0
#if (CPU==PPC85XX)
        /* e500-specific vectors (SPE unavailable, FP data, FP round, perf mon) */
        li      p0,IVOR32_VAL
        mtspr   IVOR32,p0
        li      p0,IVOR33_VAL
        mtspr   IVOR33,p0
        li      p0,IVOR34_VAL
        mtspr   IVOR34,p0
        li      p0,IVOR35_VAL
        mtspr   IVOR35,p0
#endif /* (CPU==PPC85XX) */
        blr
FUNC_END(excIvorInit)
#endif /* IVOR0 */
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -