ENGR00163603-3 MX6Q/UART : add DMA support to uart driver
author Huang Shijie <b32955@freescale.com>
Fri, 4 Nov 2011 09:15:20 +0000 (17:15 +0800)
committer Lothar Waßmann <LW@KARO-electronics.de>
Fri, 24 May 2013 06:33:33 +0000 (08:33 +0200)
Add DMA support to the i.MX UART driver.

When the platform data carries the IMXUART_SDMA flag, the driver requests
an SDMA channel for each direction. On the RX side the Receiver Ready
interrupt schedules a work item that maps a PAGE_SIZE bounce buffer and
submits a slave-sg transfer; the aging-timer and idle-condition DMA events
(UCR1_ATDMAEN, UCR4_IDDMAEN) let a transfer complete when the line goes
idle before the buffer is full. On the TX side the pending bytes are copied
from the circular buffer into a bounce buffer and sent out in a single
slave-sg transfer.

Signed-off-by: Huang Shijie <b32955@freescale.com>
drivers/tty/serial/imx.c

index a2c0e91c706b56908299354cde55f00ec2673c1b..0a8c6d0f963afea319b4730f6bd3d68ec418d6c1 100644 (file)
 
 #include <asm/io.h>
 #include <asm/irq.h>
+#include <mach/dma.h>
 #include <mach/hardware.h>
 #include <mach/imx-uart.h>
 
+
 /* Register definitions */
 #define URXD0 0x0  /* Receiver Register */
 #define URTX0 0x40 /* Transmitter Register */
@@ -80,6 +82,7 @@
 #define  UCR1_ADBR       (1<<14) /* Auto detect baud rate */
 #define  UCR1_TRDYEN     (1<<13) /* Transmitter ready interrupt enable */
 #define  UCR1_IDEN       (1<<12) /* Idle condition interrupt */
+#define  UCR1_ICD_REG(x) (((x) & 3) << 10) /* idle condition detect */
 #define  UCR1_RRDYEN     (1<<9)         /* Recv ready interrupt enable */
 #define  UCR1_RDMAEN     (1<<8)         /* Recv ready DMA enable */
 #define  UCR1_IREN       (1<<7)         /* Infrared interface enable */
@@ -88,6 +91,7 @@
 #define  UCR1_SNDBRK     (1<<4)         /* Send break */
 #define  UCR1_TDMAEN     (1<<3)         /* Transmitter ready DMA enable */
 #define  MX1_UCR1_UARTCLKEN  (1<<2)     /* UART clock enabled, mx1 only */
+#define  UCR1_ATDMAEN    (1<<2)  /* Aging DMA Timer Enable */
 #define  UCR1_DOZE       (1<<1)         /* Doze */
 #define  UCR1_UARTEN     (1<<0)         /* UART enabled */
 #define  UCR2_ESCI              (1<<15) /* Escape seq interrupt enable */
 #define  UCR2_STPB       (1<<6)         /* Stop */
 #define  UCR2_WS         (1<<5)         /* Word size */
 #define  UCR2_RTSEN      (1<<4)         /* Request to send interrupt enable */
+#define  UCR2_ATEN       (1<<3)  /* Aging Timer Enable */
 #define  UCR2_TXEN       (1<<2)         /* Transmitter enabled */
 #define  UCR2_RXEN       (1<<1)         /* Receiver enabled */
 #define  UCR2_SRST      (1<<0)  /* SW reset */
 #define  UCR4_ENIRI     (1<<8)  /* Serial infrared interrupt enable */
 #define  UCR4_WKEN      (1<<7)  /* Wake interrupt enable */
 #define  UCR4_REF16     (1<<6)  /* Ref freq 16 MHz */
+#define  UCR4_IDDMAEN    (1<<6)  /* DMA IDLE Condition Detected */
 #define  UCR4_IRSC      (1<<5)  /* IR special case */
 #define  UCR4_TCEN      (1<<3)  /* Transmit complete interrupt enable */
 #define  UCR4_BKEN      (1<<2)  /* Break condition interrupt enable */
@@ -194,6 +200,16 @@ struct imx_port {
        unsigned int            irda_inv_tx:1;
        unsigned short          trcv_delay; /* transceiver delay */
        struct clk              *clk;
+
+       /* DMA fields */
+       int                     enable_dma;
+       struct imx_dma_data     dma_data;
+       struct dma_chan         *dma_chan_rx, *dma_chan_tx;
+       struct scatterlist      rx_sgl, tx_sgl;
+       void                    *rx_buf, *tx_buf;
+       unsigned int            rx_bytes;
+       struct work_struct      tsk_dma_rx, tsk_dma_tx;
+       bool                    dma_is_rxing;
 };
 
 #ifdef CONFIG_IRDA
@@ -343,6 +359,74 @@ static inline void imx_transmit_buffer(struct imx_port *sport)
                imx_stop_tx(&sport->port);
 }
 
+static void dma_tx_callback(void *data)
+{
+       struct imx_port *sport = data;
+       struct scatterlist *sgl = &sport->tx_sgl;
+       struct circ_buf *xmit = &sport->port.state->xmit;
+
+       dma_unmap_sg(sport->port.dev, sgl, 1, DMA_TO_DEVICE);
+
+       if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
+               uart_write_wakeup(&sport->port);
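+
+       /* Reschedule the TX work to send any data queued while this transfer ran. */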
+       schedule_work(&sport->tsk_dma_tx);
+}
+
+static void dma_tx_work(struct work_struct *w)
+{
+       struct imx_port *sport = container_of(w, struct imx_port, tsk_dma_tx);
+       struct circ_buf *xmit = &sport->port.state->xmit;
+       struct scatterlist *sgl = &sport->tx_sgl;
+       struct dma_async_tx_descriptor *desc;
+       struct dma_chan *chan = sport->dma_chan_tx;
+       enum dma_status status;
+       unsigned long flags;
+       int tx_num;
+       int ret;
+
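+       /* If a previous TX DMA transfer is still in flight, do nothing here;
+        * its completion callback will reschedule this work. */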
+       status = chan->device->device_tx_status(chan, (dma_cookie_t)NULL, NULL);
+       if (DMA_IN_PROGRESS == status)
+               return;
+
+       spin_lock_irqsave(&sport->port.lock, flags);
+       tx_num = uart_circ_chars_pending(xmit);
+       if (tx_num > 0) {
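+               /* Copy the (possibly wrapped) circular buffer contents into the
+                * linear bounce buffer so they go out in a single transfer. */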
+               if (xmit->tail > xmit->head) {
+                       memcpy(sport->tx_buf, xmit->buf + xmit->tail,
+                              UART_XMIT_SIZE - xmit->tail);
+                       memcpy(sport->tx_buf + (UART_XMIT_SIZE - xmit->tail),
+                               xmit->buf, xmit->head);
+               } else
+                       memcpy(sport->tx_buf, xmit->buf + xmit->tail, tx_num);
+
+               /* update the tail. */
+               xmit->tail = (xmit->tail + tx_num) & (UART_XMIT_SIZE - 1);
+               sport->port.icount.tx += tx_num;
+               spin_unlock_irqrestore(&sport->port.lock, flags);
+
+               sg_init_one(sgl, sport->tx_buf, tx_num);
+               ret = dma_map_sg(sport->port.dev, sgl, 1, DMA_TO_DEVICE);
+               if (ret == 0) {
+                       pr_err("DMA mapping error for TX.\n");
+                       return;
+               }
+               desc = chan->device->device_prep_slave_sg(chan,
+                                       sgl, 1, DMA_TO_DEVICE, 0);
+               if (!desc) {
+                       pr_err("We cannot prepare for the TX slave dma!\n");
+                       return;
+               }
+               desc->callback = dma_tx_callback;
+               desc->callback_param = sport;
+
+               /* fire it */
+               dmaengine_submit(desc);
+               return;
+       }
+       spin_unlock_irqrestore(&sport->port.lock, flags);
+       return;
+}
+
 /*
  * interrupts disabled on entry
  */
@@ -362,8 +446,10 @@ static void imx_start_tx(struct uart_port *port)
                writel(temp, sport->port.membase + UCR1);
        }
 
-       temp = readl(sport->port.membase + UCR1);
-       writel(temp | UCR1_TXMPTYEN, sport->port.membase + UCR1);
+       if (!sport->enable_dma) {
+               temp = readl(sport->port.membase + UCR1);
+               writel(temp | UCR1_TXMPTYEN, sport->port.membase + UCR1);
+       }
 
        if (USE_IRDA(sport)) {
                temp = readl(sport->port.membase + UCR1);
@@ -375,6 +461,11 @@ static void imx_start_tx(struct uart_port *port)
                writel(temp, sport->port.membase + UCR4);
        }
 
+       if (sport->enable_dma) {
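+               /* With DMA enabled, the TX work moves the data; the
+                * transmitter-empty interrupt is not used. */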
+               schedule_work(&sport->tsk_dma_tx);
+               return;
+       }
+
        if (readl(sport->port.membase + UTS) & UTS_TXEMPTY)
                imx_transmit_buffer(sport);
 }
@@ -487,6 +578,28 @@ out:
        return IRQ_HANDLED;
 }
 
+/*
+ * We wait for the RXFIFO to be filled with some data, and then
+ * start a DMA operation to receive it.
+ */
+static void imx_dma_rxint(struct imx_port *sport)
+{
+       unsigned long temp;
+
+       temp = readl(sport->port.membase + USR2);
+       if ((temp & USR2_RDR) && !sport->dma_is_rxing) {
+               sport->dma_is_rxing = true;
+
+               /* disable the `Receiver Ready Interrupt` */
+               temp = readl(sport->port.membase + UCR1);
+               temp &= ~(UCR1_RRDYEN);
+               writel(temp, sport->port.membase + UCR1);
+
+               /* tell the DMA to receive the data. */
+               schedule_work(&sport->tsk_dma_rx);
+       }
+}
+
 static irqreturn_t imx_int(int irq, void *dev_id)
 {
        struct imx_port *sport = dev_id;
@@ -494,8 +607,12 @@ static irqreturn_t imx_int(int irq, void *dev_id)
 
        sts = readl(sport->port.membase + USR1);
 
-       if (sts & USR1_RRDY)
-               imx_rxint(irq, dev_id);
+       if (sts & USR1_RRDY) {
+               if (sport->enable_dma)
+                       imx_dma_rxint(sport);
+               else
+                       imx_rxint(irq, dev_id);
+       }
 
        if (sts & USR1_TRDY &&
                        readl(sport->port.membase + UCR1) & UCR1_TXMPTYEN)
@@ -595,6 +712,186 @@ static int imx_setup_ufcr(struct imx_port *sport, unsigned int mode)
        return 0;
 }
 
+static bool imx_uart_filter(struct dma_chan *chan, void *param)
+{
+       struct imx_port *sport = param;
+
+       if (!imx_dma_is_general_purpose(chan))
+               return false;
+       chan->private = &sport->dma_data;
+       return true;
+}
+
+#define RX_BUF_SIZE    (PAGE_SIZE)
+static int start_rx_dma(struct imx_port *sport);
+
+static void dma_rx_work(struct work_struct *w)
+{
+       struct imx_port *sport = container_of(w, struct imx_port, tsk_dma_rx);
+       struct tty_struct *tty = sport->port.state->port.tty;
+
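+       /* Push the bytes received by the last DMA transfer to the tty layer,
+        * then re-arm the RX DMA for the next burst of data. */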
+       if (sport->rx_bytes) {
+               tty_insert_flip_string(tty, sport->rx_buf, sport->rx_bytes);
+               tty_flip_buffer_push(tty);
+               sport->rx_bytes = 0;
+       }
+       start_rx_dma(sport);
+}
+
+/*
+ * There are two kinds of RX DMA interrupts:
+ *   [1] the RX DMA buffer is full.
+ *   [2] the aging timer reached its final value (when UCR4_IDDMAEN is enabled).
+ */
+static void dma_rx_callback(void *data)
+{
+       struct imx_port *sport = data;
+       struct dma_chan *chan = sport->dma_chan_rx;
+       unsigned int count;
+       struct tty_struct *tty;
+       struct scatterlist *sgl;
+       struct dma_tx_state state;
+       enum dma_status status;
+
+       tty = sport->port.state->port.tty;
+       sgl = &sport->rx_sgl;
+
+       /* unmap it first */
+       dma_unmap_sg(sport->port.dev, sgl, 1, DMA_FROM_DEVICE);
+
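+       /* The residue is the unused part of the buffer, so the number of
+        * bytes actually received is RX_BUF_SIZE - residue. */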
+       status = chan->device->device_tx_status(chan,
+                                       (dma_cookie_t)NULL, &state);
+       count = RX_BUF_SIZE - state.residue;
+       if (count) {
+               sport->rx_bytes = count;
+               schedule_work(&sport->tsk_dma_rx);
+       } else {
+               unsigned long temp;
+
+               /* Re-enable the Receiver Ready interrupt so the next incoming data restarts the DMA. */
+               temp = readl(sport->port.membase + UCR1);
+               temp |= UCR1_RRDYEN;
+               writel(temp, sport->port.membase + UCR1);
+               sport->dma_is_rxing = false;
+       }
+}
+
+static int start_rx_dma(struct imx_port *sport)
+{
+       struct scatterlist *sgl = &sport->rx_sgl;
+       struct dma_chan *chan = sport->dma_chan_rx;
+       struct dma_async_tx_descriptor *desc;
+       int ret;
+
+       sg_init_one(sgl, sport->rx_buf, RX_BUF_SIZE);
+       ret = dma_map_sg(sport->port.dev, sgl, 1, DMA_FROM_DEVICE);
+       if (ret == 0) {
+               pr_err("DMA mapping error for RX.\n");
+               return -EINVAL;
+       }
+       desc = chan->device->device_prep_slave_sg(chan,
+                               sgl, 1, DMA_FROM_DEVICE, 0);
+       if (!desc) {
+               pr_err("We cannot prepare for the RX slave dma!\n");
+               return -EINVAL;
+       }
+       desc->callback = dma_rx_callback;
+       desc->callback_param = sport;
+
+       dmaengine_submit(desc);
+       return 0;
+}
+
+static void imx_uart_dma_exit(struct imx_port *sport)
+{
+       if (sport->dma_chan_rx) {
+               dma_release_channel(sport->dma_chan_rx);
+               sport->dma_chan_rx = NULL;
+
+               kfree(sport->rx_buf);
+               sport->rx_buf = NULL;
+       }
+
+       if (sport->dma_chan_tx) {
+               dma_release_channel(sport->dma_chan_tx);
+               sport->dma_chan_tx = NULL;
+
+               kfree(sport->tx_buf);
+               sport->tx_buf = NULL;
+       }
+}
+
+/* see the "i.MX61 SDMA Scripts User Manual.doc" for the parameters */
+static int imx_uart_dma_init(struct imx_port *sport)
+{
+       struct dma_slave_config slave_config;
+       dma_cap_mask_t mask;
+       int ret;
+
+       /* prepare for RX : */
+       dma_cap_zero(mask);
+       dma_cap_set(DMA_SLAVE, mask);
+
+       sport->dma_data.priority = DMA_PRIO_HIGH;
+       sport->dma_data.dma_request = MX6Q_DMA_REQ_UART2_RX;
+       sport->dma_data.peripheral_type = IMX_DMATYPE_UART;
+
+       sport->dma_chan_rx = dma_request_channel(mask, imx_uart_filter, sport);
+       if (!sport->dma_chan_rx) {
+               pr_err("cannot get the DMA channel.\n");
+               ret = -EINVAL;
+               goto err;
+       }
+
+       slave_config.direction = DMA_FROM_DEVICE;
+       slave_config.src_addr = sport->port.mapbase + URXD0;
+       slave_config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+       slave_config.src_maxburst = RXTL; /* fix me */
+       ret = dmaengine_slave_config(sport->dma_chan_rx, &slave_config);
+       if (ret) {
+               pr_err("error in RX dma configuration.\n");
+               goto err;
+       }
+
+       sport->rx_buf = kzalloc(PAGE_SIZE, GFP_DMA);
+       if (!sport->rx_buf) {
+               pr_err("cannot alloc DMA buffer.\n");
+               ret = -ENOMEM;
+               goto err;
+       }
+       sport->rx_bytes = 0;
+
+       /* prepare for TX : */
+       sport->dma_data.dma_request = MX6Q_DMA_REQ_UART2_TX;
+       sport->dma_chan_tx = dma_request_channel(mask, imx_uart_filter, sport);
+       if (!sport->dma_chan_tx) {
+               pr_err("cannot get the TX DMA channel!\n");
+               ret = -EINVAL;
+               goto err;
+       }
+
+       slave_config.direction = DMA_TO_DEVICE;
+       slave_config.dst_addr = sport->port.mapbase + URTX0;
+       slave_config.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+       slave_config.dst_maxburst = TXTL; /* fix me */
+       ret = dmaengine_slave_config(sport->dma_chan_tx, &slave_config);
+       if (ret) {
+               pr_err("error in TX dma configuration.");
+               goto err;
+       }
+
+       sport->tx_buf = kzalloc(PAGE_SIZE, GFP_DMA);
+       if (!sport->tx_buf) {
+               pr_err("cannot alloc DMA buffer.\n");
+               ret = -ENOMEM;
+               goto err;
+       }
+       return 0;
+err:
+       imx_uart_dma_exit(sport);
+       return ret;
+}
+
 /* half the RX buffer size */
 #define CTSTL 16
 
@@ -667,6 +964,18 @@ static int imx_startup(struct uart_port *port)
                }
        }
 
+       /* Enable SDMA for the UART. */
+       if (sport->enable_dma) {
+               int ret;
+               ret = imx_uart_dma_init(sport);
+               if (ret)
+                       goto error_out3;
+
+               sport->port.flags |= UPF_LOW_LATENCY;
+               INIT_WORK(&sport->tsk_dma_tx, dma_tx_work);
+               INIT_WORK(&sport->tsk_dma_rx, dma_rx_work);
+       }
+
        /*
         * Finally, clear and enable interrupts
         */
@@ -674,6 +983,11 @@ static int imx_startup(struct uart_port *port)
 
        temp = readl(sport->port.membase + UCR1);
        temp |= UCR1_RRDYEN | UCR1_RTSDEN | UCR1_UARTEN;
+       if (sport->enable_dma) {
+               temp |= UCR1_RDMAEN | UCR1_TDMAEN | UCR1_ATDMAEN;
+               /* ICD: wait for more than 32 frames, but it is still too short. */
+               temp |= UCR1_ICD_REG(3);
+       }
 
        if (USE_IRDA(sport)) {
                temp |= UCR1_IREN;
@@ -717,6 +1031,12 @@ static int imx_startup(struct uart_port *port)
                writel(temp, sport->port.membase + UCR3);
        }
 
+       if (sport->enable_dma) {
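+               /* Let an idle condition on the line complete a pending RX DMA transfer. */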
+               temp = readl(sport->port.membase + UCR4);
+               temp |= UCR4_IDDMAEN;
+               writel(temp, sport->port.membase + UCR4);
+       }
+
        /*
         * Enable modem status interrupts
         */
@@ -792,6 +1112,8 @@ static void imx_shutdown(struct uart_port *port)
                temp &= ~(UCR1_IREN);
 
        writel(temp, sport->port.membase + UCR1);
+       if (sport->enable_dma)
+               imx_uart_dma_exit(sport);
 }
 
 static void
@@ -1317,6 +1639,9 @@ static int serial_imx_probe(struct platform_device *pdev)
                sport->have_rtscts = 1;
        if (pdata && (pdata->flags & IMXUART_USE_DCEDTE))
                sport->use_dcedte = 1;
+       if (pdata && (pdata->flags & IMXUART_SDMA))
+               sport->enable_dma = 1;
+
 #ifdef CONFIG_IRDA
        if (pdata && (pdata->flags & IMXUART_IRDA))
                sport->use_irda = 1;
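
For reference, enabling this path from a board file only requires setting the new SDMA flag in the UART platform data. The sketch below is illustrative, not part of this commit: it assumes the IMXUART_SDMA flag and the imx6q_add_imx_uart() registration helper are provided by the platform patches accompanying this series. Note also that imx_uart_dma_init() in this revision hard-codes the SDMA request lines to UART2 (MX6Q_DMA_REQ_UART2_RX/TX).

#include <mach/imx-uart.h>

/* Hypothetical board-file snippet: request RTS/CTS handshaking and SDMA
 * for UART2 (instance 1, zero-based). serial_imx_probe() reads these
 * flags and sets sport->enable_dma accordingly. */
static const struct imxuart_platform_data mx6q_uart2_pdata __initconst = {
	.flags = IMXUART_HAVE_RTSCTS | IMXUART_SDMA,
};

static void __init mx6q_board_uart_init(void)
{
	imx6q_add_imx_uart(1, &mx6q_uart2_pdata);
}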