
cachealib.s

VxWorks BSP framework source code, including header files and drivers
Page 1 of 5
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
0:	LDR	r2, [r0], #_CACHE_ALIGN_SIZE /* Displace cache entries */
	TEQS	r1, r0			/* Reached end of buffer? */
	BNE	0b			/* Branch if not */

	/* All D-cache has now been cleaned (written to memory) */

#if ((ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500) || \
     (ARMCACHE == ARMCACHE_XSCALE))
	LDR	r0, L$_sysMinicacheFlushReadArea
	ADD	r1, r0, #MINI_CACHE_SIZE
1:	LDR	r2, [r0], #MINI_CACHE_LINE_SIZE /* Displace minicache entries */
	TEQS	r1, r0			/* Reached end of buffer? */
	BNE	1b			/* Branch if not */
#endif
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
	MCR	CP_MMU, 0, r0, c7, c6, 0 /* Flush (invalidate) D-cache */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif

#if (ARMCACHE == ARMCACHE_810)
	MOV	r1, #63			/* 64 indices to clean */
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
5:	MOV	r2, #(7<<4)		/* 8 segments */
6:	ORR	r0, r2, r1, LSL #26	/* Create Index, Seg format */
	MCR	CP_MMU, 0, r0, c7, c11, 1 /* Clean ID-cache entry */
	SUBS	r2, r2, #(1<<4)		/* step on to next segment */
	BPL	6b			/* branch if not done all segs */
	SUBS	r1, r1, #1		/* step on to next index */
	BPL	5b			/* branch if not done all indices */
					/* All Index, Seg entries cleaned */
	LDR	r0, L$_cacheSwapVar	/* R0 -> FUNC(_cacheSwapVar) */
	SWPB	r1, r1, [r0]		/* Drain write-buffer */

	/* All cache is now cleaned */

	MOV	r0, #0
	MCR	CP_MMU, 0, r0, c7, c7, 0 /* Flush (invalidate) all ID-cache */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif

#if (ARMCACHE == ARMCACHE_940T)
	MOV	r1, #63			/* 64 indices to clean */
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
5:	MOV	r2, #(3<<4)		/* 4 segments */
6:	ORR	r0, r2, r1, LSL #26	/* Create Index, Seg format */
	MCR	CP_MMU, 0, r0, c7, c14, 1 /* Clean & invalidate D-cache entry */
	SUBS	r2, r2, #(1<<4)		/* step on to next segment */
	BPL	6b			/* branch if not done all segs */
	SUBS	r1, r1, #1		/* step on to next index */
	BPL	5b			/* branch if not done all indices */
					/* All Index, Seg entries cleaned and
					 * invalidated */
	LDR	r0, L$_sysCacheUncachedAdrs
	LDR	r0, [r0]		/* R0 -> uncached area */
	LDR	r0, [r0]		/* drain write-buffer */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif

#if (ARMCACHE == ARMCACHE_946E)
	LDR	r1, L$_cacheArchIndexMask /* Get ptr to index mask */
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r1, [r1]		/* num of indices -1 shifted */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
5:	MOV	r2, #(3<<30)		/* 4 segments */
6:	ORR	r0, r2, r1		/* Create Index, Seg format */
	MCR	CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */
	SUBS	r2, r2, #(1<<30)	/* step on to next segment */
	BHS	6b			/* branch if not done all segs */
	SUBS	r1, r1, #(1<<5)		/* step on to next index */
	BHS	5b			/* branch if not done all indices */
					/* All Index, Seg entries cleaned and
					 * invalidated */
	MOV	r0, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif /* ARMCACHE_946E */
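	/*
	 * Note on the loops above: none of these CPUs has a single
	 * clean-entire-D-cache operation, so every line is cleaned by an
	 * MCR naming one (index, segment) pair, with interrupts disabled
	 * for the duration of the sweep. The bit positions of the two
	 * fields differ per CPU (index at bits [31:26] with the segment
	 * shifted left by 4 on the 810/940T, versus the segment at
	 * #(3<<30) with the index stepped by 1<<5 on the 946E), which is
	 * why each variant carries its own loop.
	 */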
#if (ARMCACHE == ARMCACHE_920T)
	MOV	r1, #63			/* 64 indices to clean */
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
5:	MOV	r2, #(7<<5)		/* 8 segments */
6:	ORR	r0, r2, r1, LSL #26	/* Create Index, Seg format */
	MCR	CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */
	SUBS	r2, r2, #(1<<5)		/* step on to next segment */
	BPL	6b			/* branch if not done all segs */
	SUBS	r1, r1, #1		/* step on to next index */
	BPL	5b			/* branch if not done all indices */
					/* All Index, Seg entries cleaned and
					 * invalidated */

	/* Ensure victim pointer does not point to locked entries */

	MRC	CP_MMU, 0, r0, c9, c0, 0  /* Read D-cache lockdown base */
	MCR	CP_MMU, 0, r0, c9, c0, 0  /* Write D-cache lockdown base */
	MOV	r0, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif /* (ARMCACHE == ARMCACHE_920T) */

#if (ARMCACHE == ARMCACHE_926E)
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
5:	MRC	CP_MMU, 0, pc, c7, c14, 3 /* test, clean & invalidate */
	BNE	5b			/* branch if dirty */
	MOV	r0, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif /* (ARMCACHE == ARMCACHE_926E) */

#if (ARMCACHE == ARMCACHE_1020E) || (ARMCACHE == ARMCACHE_1022E)
	LDR	r1, L$_cacheArchIndexMask /* Get ptr to index mask */
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	ip, L$_cacheArchSegMask	/* Get pointer to segment mask */
	LDR	r1, [r1]		/* num indices to clean - 1 shifted */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	LDR	ip, [ip]		/* get num segs to clean -1 shifted */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
#if ARMCACHE_1020E_REV0_DRAIN_WB
	/* Rev 0 errata */
	MOV	r0, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */
5:	MOV	r2, ip			/* max num segments */
6:	ORR	r0, r2, r1		/* Create Index, Seg format */
	MCR	CP_MMU, 0, r0, c7, c14, 2 /* Clean & invalidate D-cache entry */
#if ARMCACHE_1020E_REV0_MCR_CP15
	NOP
	NOP
#endif /* ARMCACHE_1020E_REV0_MCR_CP15 */
	SUBS	r2, r2, #(1<<5)		/* step on to next segment */
	BHS	6b			/* branch if not done all segs */
	SUBS	r1, r1, #(1<<26)	/* step on to next index */
	BHS	5b			/* branch if not done all indices */
					/* All Index, Seg entries cleaned and
					 * invalidated */
	MOV	r0, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r0, c7, c10, 4 /* Drain write-buffer */
	MSR	cpsr, r3		/* Restore interrupt state */
#if ARMCACHE_1020E_REV0_MCR_CP15
	NOP
#endif /* ARMCACHE_1020E_REV0_MCR_CP15 */
#endif /* (ARMCACHE == ARMCACHE_1020E,1022E) */
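	/*
	 * The ARMCACHE_1020E_REV0_* conditionals above work around rev 0
	 * silicon errata: the write-buffer is drained before the sweep and
	 * each CP15 MCR is padded with NOPs. The 926E path avoids the
	 * index/segment sweep entirely, using the hardware test, clean and
	 * invalidate operation (c7, c14, 3) and looping while it reports
	 * dirty entries.
	 */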
#if ((ARMCACHE == ARMCACHE_710A) || (ARMCACHE == ARMCACHE_740T))
	LDR	r0, L$_cacheSwapVar	/* R0 -> FUNC(_cacheSwapVar) */
	SWPB	r1, r1, [r0]		/* Drain write-buffer */
	MCR	CP_MMU, 0, r0, c7, c0, 0 /* Flush (invalidate) all ID-cache */
#endif

#if (ARMCACHE == ARMCACHE_720T)
	LDR	r0, L$_cacheSwapVar	/* R0 -> FUNC(_cacheSwapVar) */
	SWPB	r1, r1, [r0]		/* Drain write-buffer */
	MOV	r0, #0
	MCR	CP_MMU, 0, r0, c7, c7, 0 /* Flush (invalidate) all ID-cache */
#endif

#if (ARM_THUMB)
	BX	lr
#else
	MOV	pc, lr
#endif

#if ((ARMCACHE == ARMCACHE_810)    || (ARMCACHE == ARMCACHE_SA110)  || \
     (ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500) || \
     (ARMCACHE == ARMCACHE_920T)   || (ARMCACHE == ARMCACHE_926E)   || \
     (ARMCACHE == ARMCACHE_940T)   || (ARMCACHE == ARMCACHE_946E)   || \
     (ARMCACHE == ARMCACHE_XSCALE) || (ARMCACHE == ARMCACHE_1020E)  || \
     (ARMCACHE == ARMCACHE_1022E))
/* Not supported on 710A, 740T, 720T */

/*******************************************************************************
*
* cacheDClear - clear (flush and invalidate) D-cache entry (ARM)
*
* This routine clears (flushes and invalidates) an entry in the Data Cache.
*
* INTERNAL
* This routine is called from cacheArchLib, after which it drains the
* write buffer so there is no need to do it here.
*
* NOMANUAL
*
* RETURNS: N/A
*
* void cacheDClear
*     (
*     void *	addr	/@ virtual address to be cleared @/
*     )
*/

_ARM_FUNCTION_CALLED_FROM_C(cacheDClear)

#if ((ARMCACHE == ARMCACHE_SA110)  || (ARMCACHE == ARMCACHE_SA1100) || \
     (ARMCACHE == ARMCACHE_SA1500) || (ARMCACHE == ARMCACHE_XSCALE))
	/*
	 * Other ARM CPUs have ints locked since they have to clean, then
	 * invalidate cache entries for addresses other than for the
	 * address specified. If on SA-110 you call this routine without
	 * ints locked, and other processes can be dirtying the address
	 * specified, then you are asking for trouble.
	 */
	MCR	CP_MMU, 0, r0, c7, c10, 1 /* Clean D-cache entry */
	MCR	CP_MMU, 0, r0, c7, c6, 1 /* Flush (invalidate) D-cache entry */
#endif /* (ARMCACHE == ARMCACHE_SA110,1100,1500) */
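	/*
	 * On the CPUs above, the clean (c7, c10, 1) and invalidate
	 * (c7, c6, 1) operations take the virtual address directly in r0,
	 * so the single entry can be cleared without a sweep.
	 */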
#if ((ARMCACHE == ARMCACHE_920T)   || (ARMCACHE == ARMCACHE_926E)   || \
     (ARMCACHE == ARMCACHE_946E)   || (ARMCACHE == ARMCACHE_1020E)  || \
     (ARMCACHE == ARMCACHE_1022E))
#if ARMCACHE_1020E_REV0_DRAIN_WB
	/* Rev 0 errata */
	MOV	r1, #0			/* Data SBZ */
	MCR	CP_MMU, 0, r1, c7, c10, 4 /* Drain write-buffer */
#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */
	/*
	 * Bits [0:4] SBZ, but will be, as this is called from
	 * cacheArchClear(), which will have ANDed off those bits.
	 */
	MCR	CP_MMU, 0, r0, c7, c14, 1 /* Clean and Inval D-cache entry */
#if ARMCACHE_1020E_REV0_DRAIN_WB
	/* Rev 0 errata */
	MCR	CP_MMU, 0, r1, c7, c10, 4 /* Drain write-buffer */
#endif /* ARMCACHE_1020E_REV0_DRAIN_WB */
#if ((ARMCACHE == ARMCACHE_1020E) && ARMCACHE_1020E_REV0_MCR_CP15)
	NOP
	NOP
#endif /* ((ARMCACHE == ARMCACHE_1020E) && ARMCACHE_1020E_REV0_MCR_CP15) */
#endif /* (ARMCACHE == ARMCACHE_920T,926E,946E,1020E) */

#if (ARMCACHE == ARMCACHE_810)
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
	AND	r0, r0, #0x70		/* r0 now contains segment number */
					/* in which addr will be cached */
	MOV	r1, #63			/* 64 indices to clean */
1:	ORR	r2, r0, r1, LSL #26	/* Create Index, Seg format */
	MCR	CP_MMU, 0, r2, c7, c11, 1 /* Clean ID-cache entry */
	MCR	CP_MMU, 0, r2, c7, c7, 1 /* Invalidate ID-cache entry */
	SUBS	r1, r1, #1		/* step on to next index */
	BPL	1b			/* branch if not done all indices */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif /* (ARMCACHE == ARMCACHE_810) */

#if (ARMCACHE == ARMCACHE_940T)
	LDR	r2, L$_cacheArchIntMask	/* Get pointer to cacheArchIntMask */
	LDR	r2, [r2]		/* get cacheArchIntMask */
	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, r2		/* disable interrupts */
	MSR	cpsr, r2
	AND	r0, r0, #0x30		/* r0 now contains segment number */
					/* in which addr will be cached */
	MOV	r1, #63			/* 64 indices to clean */
1:	ORR	r2, r0, r1, LSL #26	/* Create Index, Seg format */
	MCR	CP_MMU, 0, r2, c7, c14, 1 /* Clean & invalidate D-cache entry */
	SUBS	r1, r1, #1		/* step on to next index */
	BPL	1b			/* branch if not done all indices */
	MSR	cpsr, r3		/* Restore interrupt state */
#endif

#if (ARM_THUMB)
	BX	lr
#else
	MOV	pc, lr
#endif
#endif /* (ARMCACHE == 810,SA110,1100,1500,920T,926E,940T,946E,XSCALE,1020E,1022E) */
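	/*
	 * By contrast, the 810 and 940T above cannot clean by address:
	 * they reduce the address to its segment number (AND #0x70 or
	 * #0x30), lock interrupts, and clean all 64 indices of that
	 * segment, since any index may hold the given address.
	 */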
#if ((ARMCACHE == ARMCACHE_SA110)  || (ARMCACHE == ARMCACHE_SA1100) || \
     (ARMCACHE == ARMCACHE_SA1500) || (ARMCACHE == ARMCACHE_920T)   || \
     (ARMCACHE == ARMCACHE_926E)   || (ARMCACHE == ARMCACHE_940T)   || \
     (ARMCACHE == ARMCACHE_946E)   || (ARMCACHE == ARMCACHE_XSCALE) || \
     (ARMCACHE == ARMCACHE_1020E)  || (ARMCACHE == ARMCACHE_1022E))

/*******************************************************************************
*
* cacheIClearDisable - disable and clear I-cache (ARM)
*
* This routine disables and clears (flushes and invalidates) the Instruction
* Cache.
*
* NOMANUAL
*
* RETURNS: N/A
*
* void cacheIClearDisable (void)
*/

_ARM_FUNCTION_CALLED_FROM_C(cacheIClearDisable)

	MRS	r3, cpsr		/* Get CPSR */
	ORR	r2, r3, #I_BIT | F_BIT	/* disable all interrupts */
	MSR	cpsr, r2
	MRC	CP_MMU, 0, r2, c1, c0, 0 /* Read control register */
	BIC	r2, r2, #MMUCR_I_ENABLE	 /* Disable I-cache */
	MCR	CP_MMU, 0, r2, c1, c0, 0 /* Write control register */
#if ((ARMCACHE == ARMCACHE_920T)   || (ARMCACHE == ARMCACHE_926E)   || \
     (ARMCACHE == ARMCACHE_946E)   || (ARMCACHE == ARMCACHE_1020E)  || \
     (ARMCACHE == ARMCACHE_1022E))
	MOV	r0, #0			/* data SBZ */
#endif
	MCR	CP_MMU, 0, r0, c7, c5, 0 /* Flush (invalidate) all I-cache */
#if (ARMCACHE == ARMCACHE_XSCALE)
	/* assure that CP15 update takes effect */
	MRC	CP_MMU, 0, r0, c2, c0, 0 /* arbitrary read of CP15 */
	MOV	r0, r0			/* wait for it */
	SUB	pc, pc, #4		/* branch to next instruction */
#else
	/*
	 * The next four instructions could still come from the I-cache (2 on
	 * the 940T, 3 on 920T). We also need to flush the prefetch unit,
	 * which will be done by the MOV pc, lr below (or any interrupt).
	 */
#if ((ARMCACHE == ARMCACHE_SA110)  || \
     (ARMCACHE == ARMCACHE_SA1100) || (ARMCACHE == ARMCACHE_SA1500))
	NOP				/* 4 */
	NOP				/* 3 */
#endif
#if ((ARMCACHE == ARMCACHE_920T)   || (ARMCACHE == ARMCACHE_926E)   || \
     (ARMCACHE == ARMCACHE_1020E)  || (ARMCACHE == ARMCACHE_1022E))
	NOP				/* 3 */
#endif
	NOP				/* 2 */
#endif /* (ARMCACHE == ARMCACHE_XSCALE) */
	MSR	cpsr, r3		/* 1. Restore interrupt state */
#if (ARM_THUMB)
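The listing breaks off at `#if (ARM_THUMB)` and continues on the following pages. For orientation, the sketch below shows one way a C caller might reach cacheDClear. It is an illustrative assumption, not part of cacheALib.s: the wrapper name myCacheLineClear, the 32-byte line size, and the direct call are invented for the example (in the real library, cacheArchClear() performs the call and the address masking); only intLock()/intUnlock() from VxWorks intLib and the cacheDClear prototype in the header comment above are taken as given.

/* Hypothetical caller sketch; NOT part of cacheALib.s */

#include <vxWorks.h>
#include <intLib.h>

IMPORT void cacheDClear (void * addr);	/* NOMANUAL routine defined above */

void myCacheLineClear			/* hypothetical wrapper */
    (
    void * addr				/* address whose D-cache line to clear */
    )
    {
    int lockKey = intLock ();		/* lock ints (see SA-110 comment) */

    /* bits [4:0] SBZ: assume a 32-byte line and mask them off here,
     * as cacheArchClear() would */
    cacheDClear ((void *)((UINT32)addr & ~0x1f));

    intUnlock (lockKey);		/* restore interrupt state */
    }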
