/* ebus.c: EBUS DMA library code.
 *
 * Copyright (C) 1997 Eddie C. Dost (ecd@skynet.be)
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/interrupt.h>
#include <linux/delay.h>

#include <asm/ebus_dma.h>
/* EBUS DMA controller register offsets (byte offsets from p->regs). */
#define EBDMA_CSR	0x00UL	/* Control/Status */
#define EBDMA_ADDR	0x04UL	/* DMA Address */
#define EBDMA_COUNT	0x08UL	/* DMA Count */

/* Bits of the Control/Status register. */
#define EBDMA_CSR_INT_PEND	0x00000001	/* device interrupt pending */
#define EBDMA_CSR_ERR_PEND	0x00000002	/* DMA error pending */
#define EBDMA_CSR_DRAIN		0x00000004	/* drain in progress */
#define EBDMA_CSR_INT_EN	0x00000010	/* interrupt enable */
#define EBDMA_CSR_RESET		0x00000080	/* reset the engine */
#define EBDMA_CSR_WRITE		0x00000100	/* direction: device -> memory */
#define EBDMA_CSR_EN_DMA	0x00000200	/* DMA enable */
#define EBDMA_CSR_CYC_PEND	0x00000400	/* DMA cycle pending */
#define EBDMA_CSR_DIAG_RD_DONE	0x00000800
#define EBDMA_CSR_DIAG_WR_DONE	0x00001000
#define EBDMA_CSR_EN_CNT	0x00002000	/* enable byte counter */
#define EBDMA_CSR_TC		0x00004000	/* terminal count reached */
#define EBDMA_CSR_DIS_CSR_DRN	0x00010000
#define EBDMA_CSR_BURST_SZ_MASK	0x000c0000
#define EBDMA_CSR_BURST_SZ_1	0x00080000
#define EBDMA_CSR_BURST_SZ_4	0x00000000
#define EBDMA_CSR_BURST_SZ_8	0x00040000
#define EBDMA_CSR_BURST_SZ_16	0x000c0000
#define EBDMA_CSR_DIAG_EN	0x00100000
#define EBDMA_CSR_DIS_ERR_PEND	0x00400000
#define EBDMA_CSR_TCI_DIS	0x00800000	/* disable TC interrupt */
#define EBDMA_CSR_EN_NEXT	0x01000000	/* enable next-address autoload */
#define EBDMA_CSR_DMA_ON	0x02000000
#define EBDMA_CSR_A_LOADED	0x04000000	/* address register loaded */
#define EBDMA_CSR_NA_LOADED	0x08000000	/* next-address register loaded */
#define EBDMA_CSR_DEV_ID_MASK	0xf0000000
49 static void __ebus_dma_reset(struct ebus_dma_info *p, int no_drain)
54 writel(EBDMA_CSR_RESET, p->regs + EBDMA_CSR);
60 for (i = EBUS_DMA_RESET_TIMEOUT; i > 0; i--) {
61 val = readl(p->regs + EBDMA_CSR);
63 if (!(val & (EBDMA_CSR_DRAIN | EBDMA_CSR_CYC_PEND)))
69 static irqreturn_t ebus_dma_irq(int irq, void *dev_id)
71 struct ebus_dma_info *p = dev_id;
75 spin_lock_irqsave(&p->lock, flags);
76 csr = readl(p->regs + EBDMA_CSR);
77 writel(csr, p->regs + EBDMA_CSR);
78 spin_unlock_irqrestore(&p->lock, flags);
80 if (csr & EBDMA_CSR_ERR_PEND) {
81 printk(KERN_CRIT "ebus_dma(%s): DMA error!\n", p->name);
82 p->callback(p, EBUS_DMA_EVENT_ERROR, p->client_cookie);
84 } else if (csr & EBDMA_CSR_INT_PEND) {
86 (csr & EBDMA_CSR_TC) ?
87 EBUS_DMA_EVENT_DMA : EBUS_DMA_EVENT_DEVICE,
96 int ebus_dma_register(struct ebus_dma_info *p)
102 if (p->flags & ~(EBUS_DMA_FLAG_USE_EBDMA_HANDLER |
103 EBUS_DMA_FLAG_TCI_DISABLE))
105 if ((p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) && !p->callback)
107 if (!strlen(p->name))
110 __ebus_dma_reset(p, 1);
112 csr = EBDMA_CSR_BURST_SZ_16 | EBDMA_CSR_EN_CNT;
114 if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
115 csr |= EBDMA_CSR_TCI_DIS;
117 writel(csr, p->regs + EBDMA_CSR);
121 EXPORT_SYMBOL(ebus_dma_register);
123 int ebus_dma_irq_enable(struct ebus_dma_info *p, int on)
129 if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) {
130 if (request_irq(p->irq, ebus_dma_irq, IRQF_SHARED, p->name, p))
134 spin_lock_irqsave(&p->lock, flags);
135 csr = readl(p->regs + EBDMA_CSR);
136 csr |= EBDMA_CSR_INT_EN;
137 writel(csr, p->regs + EBDMA_CSR);
138 spin_unlock_irqrestore(&p->lock, flags);
140 spin_lock_irqsave(&p->lock, flags);
141 csr = readl(p->regs + EBDMA_CSR);
142 csr &= ~EBDMA_CSR_INT_EN;
143 writel(csr, p->regs + EBDMA_CSR);
144 spin_unlock_irqrestore(&p->lock, flags);
146 if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) {
153 EXPORT_SYMBOL(ebus_dma_irq_enable);
155 void ebus_dma_unregister(struct ebus_dma_info *p)
161 spin_lock_irqsave(&p->lock, flags);
162 csr = readl(p->regs + EBDMA_CSR);
163 if (csr & EBDMA_CSR_INT_EN) {
164 csr &= ~EBDMA_CSR_INT_EN;
165 writel(csr, p->regs + EBDMA_CSR);
168 spin_unlock_irqrestore(&p->lock, flags);
173 EXPORT_SYMBOL(ebus_dma_unregister);
175 int ebus_dma_request(struct ebus_dma_info *p, dma_addr_t bus_addr, size_t len)
181 if (len >= (1 << 24))
184 spin_lock_irqsave(&p->lock, flags);
185 csr = readl(p->regs + EBDMA_CSR);
187 if (!(csr & EBDMA_CSR_EN_DMA))
190 if (csr & EBDMA_CSR_NA_LOADED)
193 writel(len, p->regs + EBDMA_COUNT);
194 writel(bus_addr, p->regs + EBDMA_ADDR);
198 spin_unlock_irqrestore(&p->lock, flags);
202 EXPORT_SYMBOL(ebus_dma_request);
204 void ebus_dma_prepare(struct ebus_dma_info *p, int write)
209 spin_lock_irqsave(&p->lock, flags);
210 __ebus_dma_reset(p, 0);
212 csr = (EBDMA_CSR_INT_EN |
214 EBDMA_CSR_BURST_SZ_16 |
218 csr |= EBDMA_CSR_WRITE;
219 if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
220 csr |= EBDMA_CSR_TCI_DIS;
222 writel(csr, p->regs + EBDMA_CSR);
224 spin_unlock_irqrestore(&p->lock, flags);
226 EXPORT_SYMBOL(ebus_dma_prepare);
228 unsigned int ebus_dma_residue(struct ebus_dma_info *p)
230 return readl(p->regs + EBDMA_COUNT);
232 EXPORT_SYMBOL(ebus_dma_residue);
234 unsigned int ebus_dma_addr(struct ebus_dma_info *p)
236 return readl(p->regs + EBDMA_ADDR);
238 EXPORT_SYMBOL(ebus_dma_addr);
240 void ebus_dma_enable(struct ebus_dma_info *p, int on)
245 spin_lock_irqsave(&p->lock, flags);
246 orig_csr = csr = readl(p->regs + EBDMA_CSR);
248 csr |= EBDMA_CSR_EN_DMA;
250 csr &= ~EBDMA_CSR_EN_DMA;
251 if ((orig_csr & EBDMA_CSR_EN_DMA) !=
252 (csr & EBDMA_CSR_EN_DMA))
253 writel(csr, p->regs + EBDMA_CSR);
254 spin_unlock_irqrestore(&p->lock, flags);
256 EXPORT_SYMBOL(ebus_dma_enable);