/* 📄 cachealib.s  -- VxWorks ARM cache support assembly (excerpt) */
/* 字号: ("font size" -- code-viewer page chrome, not part of the original source) */
ADD r1, r0, #MINI_CACHE_SIZE1: LDR r2, [r0], #MINI_CACHE_LINE_SIZE /* Displace minicache entries */ TEQS r1, r0 /* Reached end of buffer? */ BNE 1b /* Branch if not */#endif#if (ARMCACHE == ARMCACHE_810) MOV r1, #63 /* 64 indices to clean */ LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR r2, [r2] /* get cacheArchIntMask */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r25: MOV r2, #(7<<4) /* 8 segments */6: ORR r0, r2, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c11, 1 /* Clean ID-cache entry */ SUBS r2, r2, #(1<<4) /* step on to next segment */ BPL 6b /* branch if not done all segs */ SUBS r1, r1, #1 /* step on to next index */ BPL 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned */#endif#if (ARMCACHE == ARMCACHE_940T) MOV r1, #63 /* 64 indices to clean */ LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR r2, [r2] /* get cacheArchIntMask */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r25: MOV r2, #(3<<4) /* 4 segments */6: ORR r0, r2, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c14, 1 /* Clean & invalidate D-cache entry */ SUBS r2, r2, #(1<<4) /* step on to next segment */ BPL 6b /* branch if not done all segs */ SUBS r1, r1, #1 /* step on to next index */ BPL 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned and * invalidated */#endif#if (ARMCACHE == ARMCACHE_946E) LDR r1, L$_cacheArchIndexMask /* Get ptr to index mask */ LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR r1, [r1] /* num indices to clean - 1 shifted */ LDR r2, [r2] /* get cacheArchIntMask */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r25: MOV r2, #(3<<30) /* 4 segments */6: ORR r0, r2, r1 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */ SUBS r2, r2, #(1<<30) /* step on to next segment */ BHS 6b 
/* branch if not done all segs */ SUBS r1, r1, #(1<<5) /* step on to next index */ BHS 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned and * invalidated */ MOV r0, #0 /* Data SBZ */ MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */#endif /* (ARMCACHE == ARMCACHE_946E) */#if (ARMCACHE == ARMCACHE_920T) MOV r1, #63 /* 64 indices to clean */ LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR r2, [r2] /* get cacheArchIntMask */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r25: MOV r2, #(7<<5) /* 8 segments */6: ORR r0, r2, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */ SUBS r2, r2, #(1<<5) /* step on to next segment */ BPL 6b /* branch if not done all segs */ SUBS r1, r1, #1 /* step on to next index */ BPL 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned and * invalidated */ /* Ensure victim pointer does not point to locked entries */ MRC CP_MMU, 0, r0, c9, c0, 0 /* Read D-cache lockdown base */ MCR CP_MMU, 0, r0, c9, c0, 0 /* Write D-cache lockdown base */ MOV r0, #0 /* Data SBZ */ MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */#endif /* (ARMCACHE == ARMCACHE_920T) */#if (ARMCACHE == ARMCACHE_926E) LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR r2, [r2] /* get cacheArchIntMask */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r25: MRC CP_MMU, 0, pc, c7, c14, 3 /* test, clean & invalidate */ BNE 5b /* branch if dirty */ MOV r0, #0 /* Data SBZ */ MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */#endif /* (ARMCACHE == ARMCACHE_926E) */#if (ARMCACHE == ARMCACHE_1020E) || (ARMCACHE == ARMCACHE_1022E) LDR r1, L$_cacheArchIndexMask /* Get ptr to index mask */ LDR r2, L$_cacheArchIntMask /* Get pointer to cacheArchIntMask */ LDR ip, L$_cacheArchSegMask /* Get pointer to segment mask */ LDR r1, [r1] /* num indices to clean - 1 shifted */ LDR 
r2, [r2] /* get cacheArchIntMask */ LDR ip, [ip] /* get num segs to clean -1 shifted */ MRS r3, cpsr /* Get CPSR */ ORR r2, r3, r2 /* disable interrupts */ MSR cpsr, r2#if ARMCACHE_1020E_REV0_DRAIN_WB /* Rev 0 errata */ MOV r0, #0 /* Data SBZ */ MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */5: MOV r2, ip /* max num segments */6: ORR r0, r2, r1 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */#if ARMCACHE_1020E_REV0_MCR_CP15 NOP NOP#endif SUBS r2, r2, #(1<<5) /* step on to next segment */ BHS 6b /* branch if not done all segs */ SUBS r1, r1, #(1<<26) /* step on to next index */ BHS 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned and * invalidated */ MOV r0, #0 /* Data SBZ */ MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */#endif /* (ARMCACHE == ARMCACHE_1020E,1022E) *//* All D-cache has now been cleaned (written to memory) */#if ((ARMCACHE == ARMCACHE_710A) || (ARMCACHE == ARMCACHE_720T) || \ (ARMCACHE == ARMCACHE_740T)) MRS r3, cpsr /* Get CPSR */ ORR r2, r3, #I_BIT | F_BIT /* disable all interrupts */ MSR cpsr, r2 LDR r0, L$_cacheSwapVar /* R0 -> FUNC(_cacheSwapVar) */ SWPB r1, r1, [r0] /* Drain write-buffer */#if (ARMCACHE == ARMCACHE_720T) MOV r0, #0 MCR CP_MMU, 0, r0, c7, c7, 0 /* Flush (invalidate) all ID-cache */#else MCR CP_MMU, 0, r0, c7, c0, 0 /* Flush (invalidate) all ID-cache */#endif#endif /* (710A, 720T, 740T) */#if (ARMCACHE == ARMCACHE_810) LDR r0, L$_cacheSwapVar /* R0 -> FUNC(_cacheSwapVar) */ SWPB r1, r1, [r0] /* Drain write-buffer */ MOV r0, #0 MCR CP_MMU, 0, r0, c7, c7, 0 /* Flush (invalidate) all ID-cache */#endif#if ((ARMCACHE == ARMCACHE_SA110) || (ARMCACHE == ARMCACHE_SA1100) || \ (ARMCACHE == ARMCACHE_SA1500) || (ARMCACHE == ARMCACHE_XSCALE)) MCR CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */ MCR CP_MMU, 0, r0, c7, c6, 0 /* Flush (invalidate) all D-cache */#endif#if (ARMCACHE == ARMCACHE_940T) LDR r0, 
L$_sysCacheUncachedAdrs LDR r0, [r0] /* R0 -> uncached area */ LDR r0, [r0] /* Drain write-buffer */ /* no need to invalidate, as we used the clean and invalidate op */#endif /* ARMCACHE == ARMCACHE_940T */#if (ARMCACHE == ARMCACHE_710A) LDR ip, L$_mmuCrValue /* Get pointer to soft-copy */ LDR r2, [ip] /* Load soft copy */#else MRC CP_MMU, 0, r2, c1, c0, 0 /* Read control register into R2 */#endif/* Disable D-cache and write-buffer */#if ((ARMCACHE == ARMCACHE_920T) || (ARMCACHE == ARMCACHE_926E) || \ (ARMCACHE == ARMCACHE_940T) || (ARMCACHE == ARMCACHE_946E) || \ (ARMCACHE == ARMCACHE_XSCALE) || (ARMCACHE == ARMCACHE_1020E) || \ (ARMCACHE == ARMCACHE_1022E)) /* on 920T,926E,940T,946E,XSCALE,1020E W bit is Should Be One (SBO) */ BIC r2, r2, #MMUCR_C_ENABLE#else BIC r2, r2, #MMUCR_C_ENABLE | MMUCR_W_ENABLE#endif#if (ARMCACHE == ARMCACHE_710A) STR r2, [ip] /* Store soft-copy */#endif MCR CP_MMU, 0, r2, c1, c0, 0 /* Write control register */#if ((ARMCACHE == ARMCACHE_1020E) && ARMCACHE_1020E_REV0_MCR_CP15) NOP#endif MSR cpsr, r3 /* Restore interrupt state */#if (ARM_THUMB) BX lr#else MOV pc, lr#endif#if ((ARMCACHE == ARMCACHE_810) || (ARMCACHE == ARMCACHE_SA110) || \ (ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500) || \ (ARMCACHE == ARMCACHE_920T) || (ARMCACHE == ARMCACHE_926E) || \ (ARMCACHE == ARMCACHE_940T) || (ARMCACHE == ARMCACHE_946E) || \ (ARMCACHE == ARMCACHE_XSCALE) || (ARMCACHE == ARMCACHE_1020E) || \ (ARMCACHE == ARMCACHE_1022E))/* Not needed on 710A, 720T, 740T as cacheArchLib just drains W/B instead *//********************************************************************************* cacheDFlush - flush D-cache entry (ARM)** This routine flushes (writes to memory) an entry in the Data Cache.** NOMANUAL** RETURNS: N/A** void cacheDFlush* (* void * addr /@ virtual address to be flushed @/* )*/_ARM_FUNCTION_CALLED_FROM_C(cacheDFlush)#if ((ARMCACHE == ARMCACHE_SA110) || (ARMCACHE == ARMCACHE_SA1100) || \ (ARMCACHE == ARMCACHE_SA1500) || (ARMCACHE 
== ARMCACHE_920T) || \ (ARMCACHE == ARMCACHE_926E) || (ARMCACHE == ARMCACHE_946E) || \ (ARMCACHE == ARMCACHE_XSCALE) || (ARMCACHE == ARMCACHE_1020E) || \ (ARMCACHE == ARMCACHE_1022E))#if ARMCACHE_1020E_REV0_DRAIN_WB /* Rev 0 errata */ MOV r1, #0 /* Data SBZ */ MCR CP_MMU, 0, r1, c7, c10, 4 /* Drain write-buffer */#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */ MCR CP_MMU, 0, r0, c7, c10, 1 /* Clean D-cache entry using VA */#if ARMCACHE_1020E_REV0_DRAIN_WB /* Rev 0 errata */ MCR CP_MMU, 0, r1, c7, c10, 4 /* Drain write-buffer */#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */#if ((ARMCACHE == ARMCACHE_1020E) && ARMCACHE_1020E_REV0_MCR_CP15) NOP NOP#endif /* ((ARMCACHE == ARMCACHE_1020E) && ARMCACHE_1020E_REV0_MCR_CP15) */#endif#if (ARMCACHE == ARMCACHE_810) AND r0, r0, #0x70 /* r0 now contains segment number */ /* in which addr will be cached */ MOV r1, #63 /* 64 indices to clean */1: ORR r2, r0, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r2, c7, c11, 1 /* Clean ID-cache entry */ SUBS r1, r1, #1 /* step on to next index */ BPL 1b /* branch if not done all indices */#endif#if (ARMCACHE == ARMCACHE_940T) AND r0, r0, #0x30 /* r0 now contains segment number */ /* in which addr will be cached */ MOV r1, #63 /* 64 indices to clean */1: ORR r2, r0, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r2, c7, c10, 1 /* Clean D-cache entry */ SUBS r1, r1, #1 /* step on to next index */ BPL 1b /* branch if not done all indices */#endif#if (ARM_THUMB) BX lr#else MOV pc, lr#endif#endif /* (ARMCACHE == ARMCACHE_810,SA*,920T,926E,940T,946E,XSCALE,1020E,1022E) */#if ((ARMCACHE == ARMCACHE_810) || (ARMCACHE == ARMCACHE_SA110) || \ (ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500) || \ (ARMCACHE == ARMCACHE_920T) || (ARMCACHE == ARMCACHE_926E) || \ (ARMCACHE == ARMCACHE_940T) || (ARMCACHE == ARMCACHE_946E) || \ (ARMCACHE == ARMCACHE_XSCALE) || (ARMCACHE == ARMCACHE_1020E) || \ (ARMCACHE == 
ARMCACHE_1022E))/********************************************************************************* cacheDFlushAll - flush all D-cache (ARM)** This routine flushes (writes out to memory) the Data Cache, and drains the* write-buffer.** NOMANUAL** RETURNS: N/A** void cacheDFlushAll (void)*/_ARM_FUNCTION_CALLED_FROM_C(cacheDFlushAll)#if ((ARMCACHE == ARMCACHE_SA110) || (ARMCACHE == ARMCACHE_SA1100) || \ (ARMCACHE == ARMCACHE_SA1500) || (ARMCACHE == ARMCACHE_XSCALE)) /* * The following method works by displacing entries as the * StrongArm cache is LRU. 810 is random replacement. */ LDR r0, L$_sysCacheFlushReadArea ADD r1, r0, #D_CACHE_SIZE /* End of buffer to read */0: LDR r2, [r0], #_CACHE_ALIGN_SIZE /* Displace cache entries */ TEQS r1, r0 /* Reached end of buffer? */ BNE 0b /* Branch if not */ /* All D-cache has now been cleaned */ /* (written to memory) */#endif#if ((ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500) || \ (ARMCACHE == ARMCACHE_XSCALE)) LDR r0, L$_sysMinicacheFlushReadArea ADD r1, r0, #MINI_CACHE_SIZE1: LDR r2, [r0], #MINI_CACHE_LINE_SIZE /* Displace minicache entries */ TEQS r1, r0 /* Reached end of buffer? 
*/ BNE 1b /* Branch if not */#endif#if (ARMCACHE == ARMCACHE_810) MOV r1, #63 /* 64 indices to clean */5: MOV r2, #(7<<4) /* 8 segments */6: ORR r0, r2, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c11, 1 /* Clean ID-cache entry */ SUBS r2, r2, #(1<<4) /* step on to next segment */ BPL 6b /* branch if not done all segs */ SUBS r1, r1, #1 /* step on to next index */ BPL 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned */#endif /* (ARMCACHE == ARMCACHE_810) */#if (ARMCACHE == ARMCACHE_940T) MOV r1, #63 /* 64 indices to clean */5: MOV r2, #(3<<4) /* 4 segments */6: ORR r0, r2, r1, LSL #26 /* Create Index, Seg format */ MCR CP_MMU, 0, r0, c7, c10, 1 /* Clean D-cache entry */ SUBS r2, r2, #(1<<4) /* step on to next segment */ BPL 6b /* branch if not done all segs */ SUBS r1, r1, #1 /* step on to next index */ BPL 5b /* branch if not done all indices */ /* All Index, Seg entries cleaned */#endif /* (ARMCACHE == ARMCACHE_940T) */#if (ARMCACHE == ARMCACHE_946E)
/*
 * Code-viewer page chrome (not part of the original source file):
 * ⌨️ 快捷键说明 (keyboard shortcut help):
 *   复制代码 (copy code)           Ctrl + C
 *   搜索代码 (search code)         Ctrl + F
 *   全屏模式 (full-screen mode)    F11
 *   切换主题 (toggle theme)        Ctrl + Shift + D
 *   显示快捷键 (show shortcuts)    ?
 *   增大字号 (increase font size)  Ctrl + =
 *   减小字号 (decrease font size)  Ctrl + -
 */