📄 sonicvibes.c
		outb(addr & 0xff, io);
	for (u = 3; u > 0; u--, count >>= 8, io++)
		outb(count & 0xff, io);
#else /* DMABYTEIO */
	count >>= 1;
	count--;
	outl(addr, s->iodmac + SV_DMA_ADDR0);
	outl(count, s->iodmac + SV_DMA_COUNT0);
#endif /* DMABYTEIO */
	outb(0x14, s->iodmac + SV_DMA_MODE);
}

static inline unsigned get_dmaa(struct sv_state *s)
{
#ifdef DMABYTEIO
	unsigned io = s->iodmaa+6, v = 0, u;

	for (u = 3; u > 0; u--, io--) {
		v <<= 8;
		v |= inb(io);
	}
	return v + 1;
#else /* DMABYTEIO */
	return (inl(s->iodmaa + SV_DMA_COUNT0) & 0xffffff) + 1;
#endif /* DMABYTEIO */
}

static inline unsigned get_dmac(struct sv_state *s)
{
#ifdef DMABYTEIO
	unsigned io = s->iodmac+6, v = 0, u;

	for (u = 3; u > 0; u--, io--) {
		v <<= 8;
		v |= inb(io);
	}
	return (v + 1) << 1;
#else /* DMABYTEIO */
	return ((inl(s->iodmac + SV_DMA_COUNT0) & 0xffffff) + 1) << 1;
#endif /* DMABYTEIO */
}

static void wrindir(struct sv_state *s, unsigned char idx, unsigned char data)
{
	outb(idx & 0x3f, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	outb(data, s->ioenh + SV_CODEC_IDATA);
	udelay(10);
}

static unsigned char rdindir(struct sv_state *s, unsigned char idx)
{
	unsigned char v;

	outb(idx & 0x3f, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	v = inb(s->ioenh + SV_CODEC_IDATA);
	udelay(10);
	return v;
}

static void set_fmt(struct sv_state *s, unsigned char mask, unsigned char data)
{
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	outb(SV_CIDATAFMT | SV_CIADDR_MCE, s->ioenh + SV_CODEC_IADDR);
	if (mask) {
		s->fmt = inb(s->ioenh + SV_CODEC_IDATA);
		udelay(10);
	}
	s->fmt = (s->fmt & mask) | data;
	outb(s->fmt, s->ioenh + SV_CODEC_IDATA);
	udelay(10);
	outb(0, s->ioenh + SV_CODEC_IADDR);
	spin_unlock_irqrestore(&s->lock, flags);
	udelay(10);
}

static void frobindir(struct sv_state *s, unsigned char idx, unsigned char mask, unsigned char data)
{
	outb(idx & 0x3f, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	outb((inb(s->ioenh + SV_CODEC_IDATA) & mask) ^ data, s->ioenh + SV_CODEC_IDATA);
	udelay(10);
}

#define REFFREQUENCY  24576000
#define ADCMULT       512
#define FULLRATE      48000

static unsigned setpll(struct sv_state *s, unsigned char reg, unsigned rate)
{
	unsigned long flags;
	unsigned char r, m=0, n=0;
	unsigned xm, xn, xr, xd, metric = ~0U;
	/* the warnings about m and n used uninitialized are bogus and may safely be ignored */

	if (rate < 625000/ADCMULT)
		rate = 625000/ADCMULT;
	if (rate > 150000000/ADCMULT)
		rate = 150000000/ADCMULT;
	/* slight violation of specs, needed for continuous sampling rates */
	for (r = 0; rate < 75000000/ADCMULT; r += 0x20, rate <<= 1);
	for (xn = 3; xn < 35; xn++)
		for (xm = 3; xm < 130; xm++) {
			xr = REFFREQUENCY/ADCMULT * xm / xn;
			xd = abs((signed)(xr - rate));
			if (xd < metric) {
				metric = xd;
				m = xm - 2;
				n = xn - 2;
			}
		}
	reg &= 0x3f;
	spin_lock_irqsave(&s->lock, flags);
	outb(reg, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	outb(m, s->ioenh + SV_CODEC_IDATA);
	udelay(10);
	outb(reg+1, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	outb(r | n, s->ioenh + SV_CODEC_IDATA);
	spin_unlock_irqrestore(&s->lock, flags);
	udelay(10);
	return (REFFREQUENCY/ADCMULT * (m + 2) / (n + 2)) >> ((r >> 5) & 7);
}

#if 0

static unsigned getpll(struct sv_state *s, unsigned char reg)
{
	unsigned long flags;
	unsigned char m, n;

	reg &= 0x3f;
	spin_lock_irqsave(&s->lock, flags);
	outb(reg, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	m = inb(s->ioenh + SV_CODEC_IDATA);
	udelay(10);
	outb(reg+1, s->ioenh + SV_CODEC_IADDR);
	udelay(10);
	n = inb(s->ioenh + SV_CODEC_IDATA);
	spin_unlock_irqrestore(&s->lock, flags);
	udelay(10);
	return (REFFREQUENCY/ADCMULT * (m + 2) / ((n & 0x1f) + 2)) >> ((n >> 5) & 7);
}

#endif
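/*
 * Illustrative note (not part of the original driver): setpll() does an
 * exhaustive search over the multiplier/divider pairs (xm, xn) for the
 * combination whose PLL output best matches the requested rate, possibly
 * pre-doubled by the r loop.  Per the return statement above, the rate it
 * actually achieves is
 *
 *	f = (REFFREQUENCY / ADCMULT) * (m + 2) / (n + 2)  >>  (r >> 5)
 *
 * that is, 48 kHz (REFFREQUENCY/ADCMULT) scaled by (m+2)/(n+2) and then
 * divided by the power-of-two post-divider encoded in the top bits of r.
 */
#if 0
/* Minimal sketch (hypothetical helper, not used anywhere in the driver):
   recompute the rate that a given m/n/r register triple corresponds to,
   mirroring the return expression of setpll() above. */
static unsigned sv_pll_rate(unsigned char m, unsigned char n, unsigned char r)
{
	return (REFFREQUENCY/ADCMULT * (m + 2) / (n + 2)) >> ((r >> 5) & 7);
}
#endif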
static void set_dac_rate(struct sv_state *s, unsigned rate)
{
	unsigned div;
	unsigned long flags;

	if (rate > 48000)
		rate = 48000;
	if (rate < 4000)
		rate = 4000;
	div = (rate * 65536 + FULLRATE/2) / FULLRATE;
	if (div > 65535)
		div = 65535;
	spin_lock_irqsave(&s->lock, flags);
	wrindir(s, SV_CIPCMSR1, div >> 8);
	wrindir(s, SV_CIPCMSR0, div);
	spin_unlock_irqrestore(&s->lock, flags);
	s->ratedac = (div * FULLRATE + 32768) / 65536;
}

static void set_adc_rate(struct sv_state *s, unsigned rate)
{
	unsigned long flags;
	unsigned rate1, rate2, div;

	if (rate > 48000)
		rate = 48000;
	if (rate < 4000)
		rate = 4000;
	rate1 = setpll(s, SV_CIADCPLLM, rate);
	div = (48000 + rate/2) / rate;
	if (div > 8)
		div = 8;
	rate2 = (48000 + div/2) / div;
	spin_lock_irqsave(&s->lock, flags);
	wrindir(s, SV_CIADCALTSR, (div-1) << 4);
	if (abs((signed)(rate-rate2)) <= abs((signed)(rate-rate1))) {
		wrindir(s, SV_CIADCCLKSOURCE, 0x10);
		s->rateadc = rate2;
	} else {
		wrindir(s, SV_CIADCCLKSOURCE, 0x00);
		s->rateadc = rate1;
	}
	spin_unlock_irqrestore(&s->lock, flags);
}
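/*
 * Illustrative note (not part of the original driver): the playback rate is
 * derived from the 48 kHz master rate through the 16-bit fractional divider
 * programmed above, so the effective rate is div/65536 * FULLRATE.  For
 * example, requesting 44100 Hz yields div = (44100*65536 + 24000)/48000 =
 * 60211, which is roughly 44099.85 Hz; the rounded-back value stored in
 * s->ratedac is 44100.  For capture, set_adc_rate() computes two candidates,
 * the PLL rate returned by setpll() and 48 kHz divided by an integer from 1
 * to 8, and per the comparison above picks whichever lies closer to the
 * request (writing 0x10 to SV_CIADCCLKSOURCE selects the divided clock,
 * 0x00 the PLL).
 */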
/* --------------------------------------------------------------------- */

static inline void stop_adc(struct sv_state *s)
{
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	s->enable &= ~SV_CENABLE_RE;
	wrindir(s, SV_CIENABLE, s->enable);
	spin_unlock_irqrestore(&s->lock, flags);
}

static inline void stop_dac(struct sv_state *s)
{
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	s->enable &= ~(SV_CENABLE_PPE | SV_CENABLE_PE);
	wrindir(s, SV_CIENABLE, s->enable);
	spin_unlock_irqrestore(&s->lock, flags);
}

static void start_dac(struct sv_state *s)
{
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	if ((s->dma_dac.mapped || s->dma_dac.count > 0) && s->dma_dac.ready) {
		s->enable = (s->enable & ~SV_CENABLE_PPE) | SV_CENABLE_PE;
		wrindir(s, SV_CIENABLE, s->enable);
	}
	spin_unlock_irqrestore(&s->lock, flags);
}

static void start_adc(struct sv_state *s)
{
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	if ((s->dma_adc.mapped || s->dma_adc.count < (signed)(s->dma_adc.dmasize - 2*s->dma_adc.fragsize))
	    && s->dma_adc.ready) {
		s->enable |= SV_CENABLE_RE;
		wrindir(s, SV_CIENABLE, s->enable);
	}
	spin_unlock_irqrestore(&s->lock, flags);
}

/* --------------------------------------------------------------------- */

#define DMABUF_DEFAULTORDER (17-PAGE_SHIFT)
#define DMABUF_MINORDER 1

static void dealloc_dmabuf(struct sv_state *s, struct dmabuf *db)
{
	struct page *page, *pend;

	if (db->rawbuf) {
		/* undo marking the pages as reserved */
		pend = virt_to_page(db->rawbuf + (PAGE_SIZE << db->buforder) - 1);
		for (page = virt_to_page(db->rawbuf); page <= pend; page++)
			mem_map_unreserve(page);
		pci_free_consistent(s->dev, PAGE_SIZE << db->buforder, db->rawbuf, db->dmaaddr);
	}
	db->rawbuf = NULL;
	db->mapped = db->ready = 0;
}

/* DMAA is used for playback, DMAC is used for recording */

static int prog_dmabuf(struct sv_state *s, unsigned rec)
{
	struct dmabuf *db = rec ? &s->dma_adc : &s->dma_dac;
	unsigned rate = rec ? s->rateadc : s->ratedac;
	int order;
	unsigned bytepersec;
	unsigned bufs;
	struct page *page, *pend;
	unsigned char fmt;
	unsigned long flags;

	spin_lock_irqsave(&s->lock, flags);
	fmt = s->fmt;
	if (rec) {
		s->enable &= ~SV_CENABLE_RE;
		fmt >>= SV_CFMT_CSHIFT;
	} else {
		s->enable &= ~SV_CENABLE_PE;
		fmt >>= SV_CFMT_ASHIFT;
	}
	wrindir(s, SV_CIENABLE, s->enable);
	spin_unlock_irqrestore(&s->lock, flags);
	fmt &= SV_CFMT_MASK;
	db->hwptr = db->swptr = db->total_bytes = db->count = db->error = db->endcleared = 0;
	if (!db->rawbuf) {
		db->ready = db->mapped = 0;
		for (order = DMABUF_DEFAULTORDER; order >= DMABUF_MINORDER; order--)
			if ((db->rawbuf = pci_alloc_consistent(s->dev, PAGE_SIZE << order, &db->dmaaddr)))
				break;
		if (!db->rawbuf)
			return -ENOMEM;
		db->buforder = order;
		if ((virt_to_bus(db->rawbuf) ^ (virt_to_bus(db->rawbuf) + (PAGE_SIZE << db->buforder) - 1)) & ~0xffff)
			printk(KERN_DEBUG "sv: DMA buffer crosses 64k boundary: busaddr 0x%lx size %ld\n",
			       virt_to_bus(db->rawbuf), PAGE_SIZE << db->buforder);
		if ((virt_to_bus(db->rawbuf) + (PAGE_SIZE << db->buforder) - 1) & ~0xffffff)
			printk(KERN_DEBUG "sv: DMA buffer beyond 16MB: busaddr 0x%lx size %ld\n",
			       virt_to_bus(db->rawbuf), PAGE_SIZE << db->buforder);
		/* now mark the pages as reserved; otherwise remap_page_range doesn't do what we want */
		pend = virt_to_page(db->rawbuf + (PAGE_SIZE << db->buforder) - 1);
		for (page = virt_to_page(db->rawbuf); page <= pend; page++)
			mem_map_reserve(page);
	}
	bytepersec = rate << sample_shift[fmt];
	bufs = PAGE_SIZE << db->buforder;
	if (db->ossfragshift) {
		if ((1000 << db->ossfragshift) < bytepersec)
			db->fragshift = ld2(bytepersec/1000);
		else
			db->fragshift = db->ossfragshift;
	} else {
		db->fragshift = ld2(bytepersec/100/(db->subdivision ? db->subdivision : 1));
		if (db->fragshift < 3)
			db->fragshift = 3;
	}
	db->numfrag = bufs >> db->fragshift;
	while (db->numfrag < 4 && db->fragshift > 3) {
		db->fragshift--;
		db->numfrag = bufs >> db->fragshift;
	}
	db->fragsize = 1 << db->fragshift;
	if (db->ossmaxfrags >= 4 && db->ossmaxfrags < db->numfrag)
		db->numfrag = db->ossmaxfrags;
	db->fragsamples = db->fragsize >> sample_shift[fmt];
	db->dmasize = db->numfrag << db->fragshift;
	memset(db->rawbuf, (fmt & SV_CFMT_16BIT) ? 0 : 0x80, db->dmasize);
	spin_lock_irqsave(&s->lock, flags);
	if (rec) {
		set_dmac(s, db->dmaaddr, db->numfrag << db->fragshift);
		/* program enhanced mode registers */
		wrindir(s, SV_CIDMACBASECOUNT1, (db->fragsamples-1) >> 8);
		wrindir(s, SV_CIDMACBASECOUNT0, db->fragsamples-1);
	} else {
		set_dmaa(s, db->dmaaddr, db->numfrag << db->fragshift);
		/* program enhanced mode registers */
		wrindir(s, SV_CIDMAABASECOUNT1, (db->fragsamples-1) >> 8);
		wrindir(s, SV_CIDMAABASECOUNT0, db->fragsamples-1);
	}
	spin_unlock_irqrestore(&s->lock, flags);
	db->enabled = 1;
	db->ready = 1;
	return 0;
}

static inline void clear_advance(struct sv_state *s)
{
	unsigned char c = (s->fmt & (SV_CFMT_16BIT << SV_CFMT_ASHIFT)) ? 0 : 0x80;
	unsigned char *buf = s->dma_dac.rawbuf;
	unsigned bsize = s->dma_dac.dmasize;
	unsigned bptr = s->dma_dac.swptr;
	unsigned len = s->dma_dac.fragsize;

	if (bptr + len > bsize) {
		unsigned x = bsize - bptr;
		memset(buf + bptr, c, x);
		bptr = 0;
		len -= x;
	}
	memset(buf + bptr, c, len);
}
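/*
 * Illustrative note (not part of the original driver): prog_dmabuf() aims
 * the fragment size at roughly 1/100 second of audio.  Assuming ld2() is the
 * usual floor-log2 helper defined elsewhere in this file and sample_shift[fmt]
 * is 2 for 16-bit stereo, a 44100 Hz stereo 16-bit stream gives
 * bytepersec = 44100 << 2 = 176400 and fragshift = ld2(1764) = 10, i.e.
 * 1024-byte fragments of about 5.8 ms each; numfrag is then however many
 * such fragments fit in the PAGE_SIZE << buforder DMA buffer (kept at four
 * or more where possible, and capped by ossmaxfrags).
 */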
/* call with spinlock held! */

static void sv_update_ptr(struct sv_state *s)
{
	unsigned hwptr;
	int diff;

	/* update ADC pointer */
	if (s->dma_adc.ready) {
		hwptr = (s->dma_adc.dmasize - get_dmac(s)) % s->dma_adc.dmasize;
		diff = (s->dma_adc.dmasize + hwptr - s->dma_adc.hwptr) % s->dma_adc.dmasize;
		s->dma_adc.hwptr = hwptr;
		s->dma_adc.total_bytes += diff;
		s->dma_adc.count += diff;
		if (s->dma_adc.count >= (signed)s->dma_adc.fragsize)
			wake_up(&s->dma_adc.wait);
		if (!s->dma_adc.mapped) {
			if (s->dma_adc.count > (signed)(s->dma_adc.dmasize - ((3 * s->dma_adc.fragsize) >> 1))) {
				s->enable &= ~SV_CENABLE_RE;
				wrindir(s, SV_CIENABLE, s->enable);
				s->dma_adc.error++;
			}
		}
	}
	/* update DAC pointer */
	if (s->dma_dac.ready) {
		hwptr = (s->dma_dac.dmasize - get_dmaa(s)) % s->dma_dac.dmasize;
		diff = (s->dma_dac.dmasize + hwptr - s->dma_dac.hwptr) % s->dma_dac.dmasize;
		s->dma_dac.hwptr = hwptr;
		s->dma_dac.total_bytes += diff;
		if (s->dma_dac.mapped) {
			s->dma_dac.count += diff;
			if (s->dma_dac.count >= (signed)s->dma_dac.fragsize)
				wake_up(&s->dma_dac.wait);
		} else {
			s->dma_dac.count -= diff;
			if (s->dma_dac.count <= 0) {
				s->enable &= ~SV_CENABLE_PE;
				wrindir(s, SV_CIENABLE, s->enable);
				s->dma_dac.error++;
			} else if (s->dma_dac.count <= (signed)s->dma_dac.fragsize && !s->dma_dac.endcleared) {
				clear_advance(s);
				s->dma_dac.endcleared = 1;
			}
			if (s->dma_dac.count + (signed)s->dma_dac.fragsize <= (signed)s->dma_dac.dmasize)
				wake_up(&s->dma_dac.wait);
		}
	}
}

/* hold spinlock for the following! */

static void sv_handle_midi(struct sv_state *s)
{
	unsigned char ch;
	int wake;

	wake = 0;
	while (!(inb(s->iomidi+1) & 0x80)) {
		ch = inb(s->iomidi);
		if (s->midi.icnt < MIDIINBUF) {
			s->midi.ibuf[s->midi.iwr] = ch;
			s->midi.iwr = (s->midi.iwr + 1) % MIDIINBUF;
			s->midi.icnt++;
		}
		wake = 1;
	}
	if (wake)
		wake_up(&s->midi.iwait);
	wake = 0;
	while (!(inb(s->iomidi+1) & 0x40) && s->midi.ocnt > 0) {
		outb(s->midi.obuf[s->midi.ord], s->iomidi);
		s->midi.ord = (s->midi.ord + 1) % MIDIOUTBUF;
		s->midi.ocnt--;
		if (s->midi.ocnt < MIDIOUTBUF-16)
			wake = 1;
	}
	if (wake)
		wake_up(&s->midi.owait);
}

static void sv_interrupt(int irq, void *dev_id, struct pt_regs *regs)
{
	struct sv_state *s = (struct sv_state *)dev_id;
	unsigned int intsrc;

	/* fastpath out, to ease interrupt sharing */
	intsrc = inb(s->ioenh + SV_CODEC_STATUS);
	if (!(intsrc & (SV_CSTAT_DMAA | SV_CSTAT_DMAC | SV_CSTAT_MIDI)))