
hnddma.c
Wi-Fi sources for the ASUS WL138g V2 PCI card
Language: C
Page 1 of 4
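The listing below relies on ring-index and descriptor-layout helpers that are defined earlier in hnddma.c and its headers and do not appear on this page. The following is a minimal sketch of the shapes assumed here (power-of-two ring sizes, a two-word 32-bit descriptor); the exact definitions in this source tree may differ.

/* Sketch only -- assumed shapes of helpers used in the listing below,
 * not this file's own definitions (those live earlier in hnddma.c).
 * "di" is the enclosing dma_info_t.
 */
typedef volatile struct {
	uint32 ctrl;	/* misc control bits and buffer byte count */
	uint32 addr;	/* physical address of the data buffer */
} dma32dd_t;

#define	TXD(x)			((x) & (di->ntxd - 1))	/* wrap a tx index; ntxd is a power of 2 */
#define	NEXTTXD(i)		TXD((i) + 1)
#define	PREVTXD(i)		TXD((i) - 1)
#define	NTXDACTIVE(h, t)	TXD((t) - (h))		/* descriptors posted between txin and txout */

#define	RXD(x)			((x) & (di->nrxd - 1))
#define	NEXTRXD(i)		RXD((i) + 1)

#define	B2I(bytes, type)	((bytes) / sizeof(type))	/* byte offset -> descriptor index */
#define	I2B(index, type)	((index) * sizeof(type))	/* descriptor index -> byte offset */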
/* 32 bits DMA functions */

static void
dma32_txinit(dma_info_t *di)
{
	DMA_TRACE(("%s: dma_txinit\n", di->name));

	if (di->ntxd == 0)
		return;

	di->txin = di->txout = 0;
	di->hnddma.txavail = di->ntxd - 1;

	/* clear tx descriptor ring */
	BZERO_SM((void *)di->txd32, (di->ntxd * sizeof(dma32dd_t)));

	W_REG(&di->d32txregs->control, XC_XE);
	_dma_ddtable_init(di, DMA_TX, di->txdpa);
}

static bool
dma32_txenabled(dma_info_t *di)
{
	uint32 xc;

	/* If the chip is dead, it is not enabled :-) */
	xc = R_REG(&di->d32txregs->control);
	return ((xc != 0xffffffff) && (xc & XC_XE));
}

static void
dma32_txsuspend(dma_info_t *di)
{
	DMA_TRACE(("%s: dma_txsuspend\n", di->name));

	if (di->ntxd == 0)
		return;

	OR_REG(&di->d32txregs->control, XC_SE);
}

static void
dma32_txresume(dma_info_t *di)
{
	DMA_TRACE(("%s: dma_txresume\n", di->name));

	if (di->ntxd == 0)
		return;

	AND_REG(&di->d32txregs->control, ~XC_SE);
}

static bool
dma32_txsuspended(dma_info_t *di)
{
	return (di->ntxd == 0) || ((R_REG(&di->d32txregs->control) & XC_SE) == XC_SE);
}

static void
dma32_txreclaim(dma_info_t *di, bool forceall)
{
	void *p;

	DMA_TRACE(("%s: dma_txreclaim %s\n", di->name, forceall ? "all" : ""));

	while ((p = dma32_getnexttxp(di, forceall)))
		PKTFREE(di->osh, p, TRUE);
}

static bool
dma32_txstopped(dma_info_t *di)
{
	return ((R_REG(&di->d32txregs->status) & XS_XS_MASK) == XS_XS_STOPPED);
}

static bool
dma32_rxstopped(dma_info_t *di)
{
	return ((R_REG(&di->d32rxregs->status) & RS_RS_MASK) == RS_RS_STOPPED);
}

static bool
dma32_alloc(dma_info_t *di, uint direction)
{
	uint size;
	uint ddlen;
	void *va;

	ddlen = sizeof(dma32dd_t);

	size = (direction == DMA_TX) ? (di->ntxd * ddlen) : (di->nrxd * ddlen);

	if (!ISALIGNED(DMA_CONSISTENT_ALIGN, D32RINGALIGN))
		size += D32RINGALIGN;

	if (direction == DMA_TX) {
		if ((va = DMA_ALLOC_CONSISTENT(di->osh, size, &di->txdpa, &di->tx_dmah)) == NULL) {
			DMA_ERROR(("%s: dma_attach: DMA_ALLOC_CONSISTENT(ntxd) failed\n",
			           di->name));
			return FALSE;
		}

		di->txd32 = (dma32dd_t *) ROUNDUP((uintptr)va, D32RINGALIGN);
		di->txdalign = (uint)((int8*)di->txd32 - (int8*)va);
		di->txdpa += di->txdalign;
		di->txdalloc = size;
		ASSERT(ISALIGNED((uintptr)di->txd32, D32RINGALIGN));
	} else {
		if ((va = DMA_ALLOC_CONSISTENT(di->osh, size, &di->rxdpa, &di->rx_dmah)) == NULL) {
			DMA_ERROR(("%s: dma_attach: DMA_ALLOC_CONSISTENT(nrxd) failed\n",
			           di->name));
			return FALSE;
		}
		di->rxd32 = (dma32dd_t *) ROUNDUP((uintptr)va, D32RINGALIGN);
		di->rxdalign = (uint)((int8*)di->rxd32 - (int8*)va);
		di->rxdpa += di->rxdalign;
		di->rxdalloc = size;
		ASSERT(ISALIGNED((uintptr)di->rxd32, D32RINGALIGN));
	}

	return TRUE;
}

static bool
dma32_txreset(dma_info_t *di)
{
	uint32 status;

	if (di->ntxd == 0)
		return TRUE;

	/* suspend tx DMA first */
	W_REG(&di->d32txregs->control, XC_SE);
	SPINWAIT(((status = (R_REG(&di->d32txregs->status) & XS_XS_MASK)) != XS_XS_DISABLED) &&
	         (status != XS_XS_IDLE) &&
	         (status != XS_XS_STOPPED),
	         10000);

	W_REG(&di->d32txregs->control, 0);
	SPINWAIT(((status = (R_REG(&di->d32txregs->status) & XS_XS_MASK)) != XS_XS_DISABLED),
	         10000);

	/* wait for the last transaction to complete */
	OSL_DELAY(300);

	return (status == XS_XS_DISABLED);
}

static bool
dma32_rxidle(dma_info_t *di)
{
	DMA_TRACE(("%s: dma_rxidle\n", di->name));

	if (di->nrxd == 0)
		return TRUE;

	return ((R_REG(&di->d32rxregs->status) & RS_CD_MASK) ==
	        R_REG(&di->d32rxregs->ptr));
}

static bool
dma32_rxreset(dma_info_t *di)
{
	uint32 status;

	if (di->nrxd == 0)
		return TRUE;

	W_REG(&di->d32rxregs->control, 0);
	SPINWAIT(((status = (R_REG(&di->d32rxregs->status) & RS_RS_MASK)) != RS_RS_DISABLED),
	         10000);

	return (status == RS_RS_DISABLED);
}

static bool
dma32_rxenabled(dma_info_t *di)
{
	uint32 rc;

	rc = R_REG(&di->d32rxregs->control);
	return ((rc != 0xffffffff) && (rc & RC_RE));
}

static bool
dma32_txsuspendedidle(dma_info_t *di)
{
	if (di->ntxd == 0)
		return TRUE;

	if (!(R_REG(&di->d32txregs->control) & XC_SE))
		return 0;

	if ((R_REG(&di->d32txregs->status) & XS_XS_MASK) != XS_XS_IDLE)
		return 0;

	OSL_DELAY(2);
	return ((R_REG(&di->d32txregs->status) & XS_XS_MASK) == XS_XS_IDLE);
}

/* !! tx entry routine
 * supports full 32bit dma engine buffer addressing so
 * dma buffers can cross 4 Kbyte page boundaries.
 */
static int
dma32_txfast(dma_info_t *di, void *p0, bool commit)
{
	void *p, *next;
	uchar *data;
	uint len;
	uint txout;
	uint32 flags = 0;
	uint32 pa;

	DMA_TRACE(("%s: dma_txfast\n", di->name));

	txout = di->txout;

	/*
	 * Walk the chain of packet buffers
	 * allocating and initializing transmit descriptor entries.
	 */
	for (p = p0; p; p = next) {
		data = PKTDATA(di->osh, p);
		len = PKTLEN(di->osh, p);
		next = PKTNEXT(di->osh, p);

		/* return nonzero if out of tx descriptors */
		if (NEXTTXD(txout) == di->txin)
			goto outoftxd;

		if (len == 0)
			continue;

		/* get physical address of buffer start */
		pa = (uint32) DMA_MAP(di->osh, data, len, DMA_TX, p, &di->txp_dmah[txout]);

		flags = 0;
		if (p == p0)
			flags |= CTRL_SOF;
		if (next == NULL)
			flags |= (CTRL_IOC | CTRL_EOF);
		if (txout == (di->ntxd - 1))
			flags |= CTRL_EOT;

		dma32_dd_upd(di, di->txd32, pa, txout, &flags, len);
		ASSERT(di->txp[txout] == NULL);

		txout = NEXTTXD(txout);
	}

	/* if last txd eof not set, fix it */
	if (!(flags & CTRL_EOF))
		W_SM(&di->txd32[PREVTXD(txout)].ctrl, BUS_SWAP32(flags | CTRL_IOC | CTRL_EOF));

	/* save the packet */
	di->txp[PREVTXD(txout)] = p0;

	/* bump the tx descriptor index */
	di->txout = txout;

	/* kick the chip */
	if (commit)
		W_REG(&di->d32txregs->ptr, I2B(txout, dma32dd_t));

	/* tx flow control */
	di->hnddma.txavail = di->ntxd - NTXDACTIVE(di->txin, di->txout) - 1;

	return (0);

outoftxd:
	DMA_ERROR(("%s: dma_txfast: out of txds\n", di->name));
	PKTFREE(di->osh, p0, TRUE);
	di->hnddma.txavail = 0;
	di->hnddma.txnobuf++;
	return (-1);
}
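/*
 * Flow-control arithmetic (illustration, assuming the ring helpers sketched
 * near the top of this page): the "tx flow control" update keeps one
 * descriptor slot permanently empty so that txout == txin can only mean
 * "ring empty".  For example, with ntxd = 64, txin = 10 and txout = 14,
 * NTXDACTIVE(txin, txout) = 4 descriptors are posted, giving
 * hnddma.txavail = 64 - 4 - 1 = 59.
 */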
/*
 * Reclaim next completed txd (txds if using chained buffers) and
 * return associated packet.
 * If 'force' is true, reclaim txd(s) and return associated packet
 * regardless of the value of the hardware "curr" pointer.
 */
static void *
dma32_getnexttxp(dma_info_t *di, bool forceall)
{
	uint start, end, i;
	void *txp;

	DMA_TRACE(("%s: dma_getnexttxp %s\n", di->name, forceall ? "all" : ""));

	if (di->ntxd == 0)
		return (NULL);

	txp = NULL;

	start = di->txin;
	if (forceall)
		end = di->txout;
	else
		end = B2I(R_REG(&di->d32txregs->status) & XS_CD_MASK, dma32dd_t);

	if ((start == 0) && (end > di->txout))
		goto bogus;

	for (i = start; i != end && !txp; i = NEXTTXD(i)) {
		DMA_UNMAP(di->osh, (BUS_SWAP32(R_SM(&di->txd32[i].addr)) - di->dataoffsetlow),
		          (BUS_SWAP32(R_SM(&di->txd32[i].ctrl)) & CTRL_BC_MASK),
		          DMA_TX, di->txp[i], &di->txp_dmah[i]);

		W_SM(&di->txd32[i].addr, 0xdeadbeef);

		txp = di->txp[i];
		di->txp[i] = NULL;
	}

	di->txin = i;

	/* tx flow control */
	di->hnddma.txavail = di->ntxd - NTXDACTIVE(di->txin, di->txout) - 1;

	return (txp);

bogus:
/*
	DMA_ERROR(("dma_getnexttxp: bogus curr: start %d end %d txout %d force %d\n",
		start, end, di->txout, forceall));
*/
	return (NULL);
}

static void *
dma32_getnextrxp(dma_info_t *di, bool forceall)
{
	uint i;
	void *rxp;

	/* if forcing, dma engine must be disabled */
	ASSERT(!forceall || !dma32_rxenabled(di));

	i = di->rxin;

	/* return if no packets posted */
	if (i == di->rxout)
		return (NULL);

	/* ignore curr if forceall */
	if (!forceall && (i == B2I(R_REG(&di->d32rxregs->status) & RS_CD_MASK, dma32dd_t)))
		return (NULL);

	/* get the packet pointer that corresponds to the rx descriptor */
	rxp = di->rxp[i];
	ASSERT(rxp);
	di->rxp[i] = NULL;

	/* clear this packet from the descriptor ring */
	DMA_UNMAP(di->osh, (BUS_SWAP32(R_SM(&di->rxd32[i].addr)) - di->dataoffsetlow),
	          di->rxbufsize, DMA_RX, rxp, &di->rxp_dmah[i]);

	W_SM(&di->rxd32[i].addr, 0xdeadbeef);

	di->rxin = NEXTRXD(i);

	return (rxp);
}

/*
 * Rotate all active tx dma ring entries "forward" by (ActiveDescriptor - txin).
 */
static void
dma32_txrotate(dma_info_t *di)
{
	uint ad;
	uint nactive;
	uint rot;
	uint old, new;
	uint32 w;
	uint first, last;

	ASSERT(dma32_txsuspendedidle(di));

	nactive = _dma_txactive(di);
	ad = B2I(((R_REG(&di->d32txregs->status) & XS_AD_MASK) >> XS_AD_SHIFT), dma32dd_t);
	rot = TXD(ad - di->txin);

	ASSERT(rot < di->ntxd);

	/* full-ring case is a lot harder - don't worry about this */
	if (rot >= (di->ntxd - nactive)) {
		DMA_ERROR(("%s: dma_txrotate: ring full - punt\n", di->name));
		return;
	}

	first = di->txin;
	last = PREVTXD(di->txout);

	/* move entries starting at last and moving backwards to first */
	for (old = last; old != PREVTXD(first); old = PREVTXD(old)) {
		new = TXD(old + rot);

		/*
		 * Move the tx dma descriptor.
		 * EOT is set only in the last entry in the ring.
		 */
		w = BUS_SWAP32(R_SM(&di->txd32[old].ctrl)) & ~CTRL_EOT;
		if (new == (di->ntxd - 1))
			w |= CTRL_EOT;
		W_SM(&di->txd32[new].ctrl, BUS_SWAP32(w));
		W_SM(&di->txd32[new].addr, R_SM(&di->txd32[old].addr));

		/* zap the old tx dma descriptor address field */
		W_SM(&di->txd32[old].addr, BUS_SWAP32(0xdeadbeef));

		/* move the corresponding txp[] entry */
		ASSERT(di->txp[new] == NULL);
		di->txp[new] = di->txp[old];
		di->txp[old] = NULL;
	}

	/* update txin and txout */
	di->txin = ad;
	di->txout = TXD(di->txout + rot);
	di->hnddma.txavail = di->ntxd - NTXDACTIVE(di->txin, di->txout) - 1;

	/* kick the chip */
	W_REG(&di->d32txregs->ptr, I2B(di->txout, dma32dd_t));
}

/* 64 bits DMA functions */

#ifdef BCMDMA64

static void
dma64_txinit(dma_info_t *di)
{
	DMA_TRACE(("%s: dma_txinit\n", di->name));

	if (di->ntxd == 0)
		return;

	di->txin = di->txout = 0;
	di->hnddma.txavail = di->ntxd - 1;

	/* clear tx descriptor ring */
	BZERO_SM((void *)di->txd64, (di->ntxd * sizeof(dma64dd_t)));

	W_REG(&di->d64txregs->control, D64_XC_XE);
	_dma_ddtable_init(di, DMA_TX, di->txdpa);
}

static bool
dma64_txenabled(dma_info_t *di)
