/*
 * Copyright 2005-2011 Freescale Semiconductor, Inc. All Rights Reserved.
 *
 * The code contained herein is licensed under the GNU General Public
 * License. You may obtain a copy of the GNU General Public License
 * Version 2 or later at the following locations:
 *
 * http://www.opensource.org/licenses/gpl-license.html
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @brief This file contains the IPU driver common API functions.
 */
21 #include <linux/types.h>
22 #include <linux/init.h>
23 #include <linux/platform_device.h>
24 #include <linux/err.h>
25 #include <linux/spinlock.h>
26 #include <linux/delay.h>
27 #include <linux/interrupt.h>
29 #include <linux/irq.h>
30 #include <linux/irqdesc.h>
31 #include <linux/clk.h>
32 #include <mach/clock.h>
33 #include <mach/hardware.h>
34 #include <mach/ipu-v3.h>
35 #include <mach/devices-common.h>
36 #include <asm/cacheflush.h>
37 #include <linux/delay.h>
41 #include "ipu_param_mem.h"
43 static struct ipu_soc ipu_array[MXC_IPU_MAX_NUM];
46 /* Static functions */
47 static irqreturn_t ipu_irq_handler(int irq, void *desc);
49 static inline uint32_t channel_2_dma(ipu_channel_t ch, ipu_buffer_t type)
51 return ((uint32_t) ch >> (6 * type)) & 0x3F;
/* True for IDMAC channels owned by the IC (11..22, excluding 17 and 18). */
static inline int _ipu_is_ic_chan(uint32_t dma_chan)
{
	if (dma_chan < 11 || dma_chan > 22)
		return 0;
	return dma_chan != 17 && dma_chan != 18;
}
/* True for the IC graphics-plane input channels. */
static inline int _ipu_is_ic_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 14:
	case 15:
		return 1;
	default:
		return 0;
	}
}
/* Either DP BG or DP FG can be graphic window */
static inline int _ipu_is_dp_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 23:	/* DP BG */
	case 27:	/* DP FG */
		return 1;
	default:
		return 0;
	}
}
/* True for IDMAC channels served by the IRT (rotator), 45..50. */
static inline int _ipu_is_irt_chan(uint32_t dma_chan)
{
	return !(dma_chan < 45 || dma_chan > 50);
}
/* True for IDMAC channels routed through the DMFC (23..29). */
static inline int _ipu_is_dmfc_chan(uint32_t dma_chan)
{
	if (dma_chan < 23)
		return 0;
	return dma_chan <= 29;
}
/*
 * True for IDMAC channels routed through the SMFC (CSI capture
 * channels 0..3).
 *
 * Fix: dma_chan is unsigned, so the original "(dma_chan >= 0)" test
 * was always true (it only generated a -Wtype-limits warning); only
 * the upper bound is meaningful.
 */
static inline int _ipu_is_smfc_chan(uint32_t dma_chan)
{
	return dma_chan <= 3;
}
85 static inline int _ipu_is_trb_chan(uint32_t dma_chan)
/* True for DMA channels that support triple-buffer (TRB) mode.
 * NOTE(review): the tail of this condition is not visible in this
 * chunk; presumably it also requires g_ipu_hw_rev >= 2 (only
 * IPUv3EX and later support TRB) -- confirm against the full file. */
87 return (((dma_chan == 8) || (dma_chan == 9) ||
88 (dma_chan == 10) || (dma_chan == 13) ||
89 (dma_chan == 21) || (dma_chan == 23) ||
90 (dma_chan == 27) || (dma_chan == 28)) &&
/* An IDMAC channel number is valid unless it is the NO_DMA sentinel. */
#define idma_is_valid(ch)	((ch) != NO_DMA)
/* Bit for a channel inside its 32-bit bank register; 0 for invalid channels.
 * Fix: macro arguments are now parenthesized so expressions such as
 * idma_mask(a + b) or tri_cur_buf_mask(ch + 1) expand correctly. */
#define idma_mask(ch)		(idma_is_valid(ch) ? (1UL << ((ch) & 0x1F)) : 0)
#define idma_is_set(ipu, reg, dma)	(ipu_idmac_read(ipu, reg(dma)) & idma_mask(dma))
/* Triple-buffer state uses two bits per channel in the TRB registers. */
#define tri_cur_buf_mask(ch)	(idma_mask((ch) * 2) * 3)
#define tri_cur_buf_shift(ch)	(ffs(idma_mask((ch) * 2)) - 1)
100 static int ipu_reset(struct ipu_soc *ipu)
/* Software-reset all IPU submodules and wait for completion. */
104 ipu_cm_write(ipu, 0x807FFFFF, IPU_MEM_RST);
/* Bit 31 self-clears when the reset is done; the loop body and any
 * timeout handling are not visible in this chunk. */
106 while (ipu_cm_read(ipu, IPU_MEM_RST) & 0x80000000) {
115 static int __devinit ipu_clk_setup_enable(struct ipu_soc *ipu,
116 struct platform_device *pdev)
/* Look up and enable the per-instance IPU clocks and register the
 * two pixel clocks with the clkdev framework. */
118 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
119 char ipu_clk[] = "ipu1_clk";
120 char di0_clk[] = "ipu1_di0_clk";
121 char di1_clk[] = "ipu1_di1_clk";
/* Patch the instance digit (index 3, the '1') with pdev->id so IPU2
 * looks up "ipu2_clk" etc. */
123 ipu_clk[3] += pdev->id;
124 di0_clk[3] += pdev->id;
125 di1_clk[3] += pdev->id;
127 ipu->ipu_clk = clk_get(ipu->dev, ipu_clk);
128 if (IS_ERR(ipu->ipu_clk)) {
129 dev_err(ipu->dev, "clk_get failed");
130 return PTR_ERR(ipu->ipu_clk);
132 dev_dbg(ipu->dev, "ipu_clk = %lu\n", clk_get_rate(ipu->ipu_clk));
/* Install the template pixel clocks for this instance and expose
 * them through clkdev. */
134 ipu->pixel_clk[0] = ipu_pixel_clk[0];
135 ipu->pixel_clk[1] = ipu_pixel_clk[1];
137 ipu_lookups[0].clk = &ipu->pixel_clk[0];
138 ipu_lookups[1].clk = &ipu->pixel_clk[1];
139 clkdev_add(&ipu_lookups[0]);
140 clkdev_add(&ipu_lookups[1]);
142 clk_debug_register(&ipu->pixel_clk[0]);
143 clk_debug_register(&ipu->pixel_clk[1]);
145 clk_enable(ipu->ipu_clk);
/* Pixel clocks are derived from the main IPU clock. */
147 clk_set_parent(&ipu->pixel_clk[0], ipu->ipu_clk);
148 clk_set_parent(&ipu->pixel_clk[1], ipu->ipu_clk);
/* NOTE(review): di/csi clk_get() results are not error-checked in
 * the lines visible here -- confirm against the full file. */
150 ipu->di_clk[0] = clk_get(ipu->dev, di0_clk);
151 ipu->di_clk[1] = clk_get(ipu->dev, di1_clk);
153 ipu->csi_clk[0] = clk_get(ipu->dev, plat_data->csi_clk[0]);
154 ipu->csi_clk[1] = clk_get(ipu->dev, plat_data->csi_clk[1]);
160 static void ipu_irq_handler(unsigned int irq, struct irq_desc *desc)
/* Chained handler for the IPU sync interrupt line: scan the sync
 * status banks and dispatch each pending, enabled bit as its own
 * Linux irq. NOTE(review): a forward declaration earlier in this
 * file declares ipu_irq_handler with an irqreturn_t(int, void *)
 * signature and ipu_probe() passes it to request_irq() -- the two
 * usages conflict; confirm which variant the full file builds. */
162 struct ipu_soc *ipu = irq_desc_get_handler_data(desc);
/* IPU_INT_STAT/IPU_INT_CTRL bank indices handled here; 0 terminates. */
163 const int int_reg[] = { 1, 2, 3, 4, 11, 12, 13, 14, 15, 0 };
/* Service only bits that are both pending and enabled. */
171 status = ipu_cm_read(ipu, IPU_INT_STAT(int_reg[i]));
172 status &= ipu_cm_read(ipu, IPU_INT_CTRL(int_reg[i]));
174 while ((line = ffs(status))) {
/* NOTE(review): ffs() is 1-based; the usual "line--;" before the
 * mask/offset math is not visible in this chunk -- confirm it
 * exists, otherwise the wrong bit is cleared and dispatched. */
176 status &= ~(1UL << line);
177 line += ipu->irq_start + (int_reg[i] - 1) * 32;
178 generic_handle_irq(line);
184 static void ipu_err_irq_handler(unsigned int irq, struct irq_desc *desc)
/* Chained handler for the IPU error interrupt line; same dispatch
 * scheme as ipu_irq_handler() but over the error status banks. */
186 struct ipu_soc *ipu = irq_desc_get_handler_data(desc);
/* Error bank indices; 0 terminates the list. */
187 const int int_reg[] = { 5, 6, 9, 10, 0 };
195 status = ipu_cm_read(ipu, IPU_INT_STAT(int_reg[i]));
196 status &= ipu_cm_read(ipu, IPU_INT_CTRL(int_reg[i]));
198 while ((line = ffs(status))) {
/* NOTE(review): as in ipu_irq_handler(), the 1-based ffs() result
 * normally needs a "line--;" that is not visible in this chunk. */
200 status &= ~(1UL << line);
201 line += ipu->irq_start + (int_reg[i] - 1) * 32;
202 generic_handle_irq(line);
208 static void ipu_ack_irq(struct irq_data *d)
210 struct ipu_soc *ipu = irq_data_get_irq_chip_data(d);
211 unsigned int irq = d->irq - ipu->irq_start;
214 spin_lock_irqsave(&ipu->ipu_lock, flags);
215 ipu_cm_write(ipu, 1 << (irq % 32), IPU_INT_STAT(irq / 32 + 1));
216 spin_unlock_irqrestore(&ipu->ipu_lock, flags);
219 static void ipu_unmask_irq(struct irq_data *d)
221 struct ipu_soc *ipu = irq_data_get_irq_chip_data(d);
222 unsigned int irq = d->irq - ipu->irq_start;
226 spin_lock_irqsave(&ipu->ipu_lock, flags);
227 reg = ipu_cm_read(ipu, IPU_INT_CTRL(irq / 32 + 1));
228 reg |= 1 << (irq % 32);
229 ipu_cm_write(ipu, reg, IPU_INT_CTRL(irq / 32 + 1));
230 spin_unlock_irqrestore(&ipu->ipu_lock, flags);
233 static void ipu_mask_irq(struct irq_data *d)
235 struct ipu_soc *ipu = irq_data_get_irq_chip_data(d);
236 unsigned int irq = d->irq - ipu->irq_start;
240 spin_lock_irqsave(&ipu->ipu_lock, flags);
241 reg = ipu_cm_read(ipu, IPU_INT_CTRL(irq / 32 + 1));
242 reg &= ~(1 << (irq % 32));
243 ipu_cm_write(ipu, reg, IPU_INT_CTRL(irq / 32 + 1));
244 spin_unlock_irqrestore(&ipu->ipu_lock, flags);
247 static struct irq_chip ipu_irq_chip = {
/* irq_chip for the per-bit IPU interrupts demuxed by the chained
 * handlers above. NOTE(review): the .name initializer and closing
 * brace are not visible in this chunk. */
249 .irq_ack = ipu_ack_irq,
250 .irq_mask = ipu_mask_irq,
251 .irq_unmask = ipu_unmask_irq,
254 static void __devinit ipu_irq_setup(struct ipu_soc *ipu)
258 for (i = ipu->irq_start; i < ipu->irq_start + MX5_IPU_IRQS; i++) {
259 irq_set_chip_and_handler(i, &ipu_irq_chip, handle_level_irq);
260 set_irq_flags(i, IRQF_VALID);
261 irq_set_chip_data(i, ipu);
264 irq_set_chained_handler(ipu->irq_sync, ipu_irq_handler);
265 irq_set_handler_data(ipu->irq_sync, ipu);
266 irq_set_chained_handler(ipu->irq_err, ipu_err_irq_handler);
267 irq_set_handler_data(ipu->irq_err, ipu);
270 int ipu_request_irq(struct ipu_soc *ipu, unsigned int irq,
271 irq_handler_t handler, unsigned long flags,
272 const char *name, void *dev)
274 return request_irq(ipu->irq_start + irq, handler, flags, name, dev);
276 EXPORT_SYMBOL_GPL(ipu_request_irq);
278 void ipu_enable_irq(struct ipu_soc *ipu, unsigned int irq)
280 return enable_irq(ipu->irq_start + irq);
282 EXPORT_SYMBOL_GPL(ipu_disable_irq);
284 void ipu_disable_irq(struct ipu_soc *ipu, unsigned int irq)
286 return disable_irq(ipu->irq_start + irq);
288 EXPORT_SYMBOL_GPL(ipu_disable_irq);
290 void ipu_free_irq(struct ipu_soc *ipu, unsigned int irq, void *dev_id)
292 free_irq(ipu->irq_start + irq, dev_id);
294 EXPORT_SYMBOL_GPL(ipu_free_irq);
296 static irqreturn_t ipu_completion_handler(int irq, void *dev)
/* ISR used by ipu_wait_for_interrupt(): wake the sleeping waiter.
 * NOTE(review): the "return IRQ_HANDLED;" line is not visible in
 * this chunk. */
298 struct completion *completion = dev;
300 complete(completion);
304 int ipu_wait_for_interrupt(struct ipu_soc *ipu, int interrupt, int timeout_ms)
/* Block until the given IPU interrupt fires or timeout_ms elapses.
 * Returns 0 on success, -ETIMEDOUT on timeout; the error path for a
 * failed irq request is only partially visible in this chunk. */
306 DECLARE_COMPLETION_ONSTACK(completion);
/* Temporarily claim the interrupt with a handler that just
 * completes the on-stack completion. */
309 ret = ipu_request_irq(ipu, interrupt, ipu_completion_handler,
310 0, NULL, &completion);
313 "ipu request irq %d fail\n", interrupt);
317 ret = wait_for_completion_timeout(&completion,
318 msecs_to_jiffies(timeout_ms));
320 ipu_free_irq(ipu, interrupt, &completion);
/* wait_for_completion_timeout() returns >0 jiffies remaining on
 * success, 0 on timeout. */
322 return ret > 0 ? 0 : -ETIMEDOUT;
324 EXPORT_SYMBOL_GPL(ipu_wait_for_interrupt);
327 struct ipu_soc *ipu_get_soc(int id)
329 if (id >= MXC_IPU_MAX_NUM)
330 return ERR_PTR(-ENODEV);
331 else if (!ipu_array[id].online)
332 return ERR_PTR(-ENODEV);
334 return &(ipu_array[id]);
336 EXPORT_SYMBOL_GPL(ipu_get_soc);
338 void _ipu_lock(struct ipu_soc *ipu)
340 /*TODO:remove in_irq() condition after v4l2 driver rewrite*/
341 if (!in_irq() && !in_softirq())
342 mutex_lock(&ipu->mutex_lock);
345 void _ipu_unlock(struct ipu_soc *ipu)
347 /*TODO:remove in_irq() condition after v4l2 driver rewrite*/
348 if (!in_irq() && !in_softirq())
349 mutex_unlock(&ipu->mutex_lock);
352 void _ipu_get(struct ipu_soc *ipu)
354 if (atomic_inc_return(&ipu->ipu_use_count) == 1)
355 clk_enable(ipu->ipu_clk);
358 void _ipu_put(struct ipu_soc *ipu)
360 if (atomic_dec_return(&ipu->ipu_use_count) == 0)
361 clk_disable(ipu->ipu_clk);
365 * This function is called by the driver framework to initialize the IPU
368 * @param dev The device structure for the IPU passed in by the
371 * @return Returns 0 on success or negative error code on error
373 static int __devinit ipu_probe(struct platform_device *pdev)
/* Platform probe: map the register windows, claim interrupts, set up
 * clocks, reset the IPU and register the device. Several lines
 * (declarations, error returns, closing braces) are not visible in
 * this chunk. */
375 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
377 struct resource *res;
378 unsigned long ipu_base;
381 if (pdev->id >= MXC_IPU_MAX_NUM)
384 ipu = &ipu_array[pdev->id];
385 memset(ipu, 0, sizeof(struct ipu_soc));
387 spin_lock_init(&ipu->spin_lock);
388 mutex_init(&ipu->mutex_lock);
389 atomic_set(&ipu->ipu_use_count, 0);
391 g_ipu_hw_rev = plat_data->rev;
393 ipu->dev = &pdev->dev;
/* Platform-specific init hook (pinmux etc.). */
396 plat_data->init(pdev->id);
398 ipu->irq_sync = platform_get_irq(pdev, 0);
399 ipu->irq_err = platform_get_irq(pdev, 1);
400 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
402 if (!res || ipu->irq_sync < 0 || ipu->irq_err < 0) {
/* NOTE(review): ipu_irq_handler is passed to request_irq() here,
 * but the definition visible above takes (unsigned int,
 * struct irq_desc *), which is a chained-handler signature, not an
 * irqreturn_t(int, void *) ISR -- confirm against the full file. */
407 if (request_irq(ipu->irq_sync, ipu_irq_handler, 0, pdev->name, ipu) != 0) {
408 dev_err(ipu->dev, "request SYNC interrupt failed\n");
410 goto failed_req_irq_sync;
412 /* Some platforms have 2 IPU interrupts */
413 if (ipu->irq_err >= 0) {
415 (ipu->irq_err, ipu_irq_handler, 0, pdev->name, ipu) != 0) {
416 dev_err(ipu->dev, "request ERR interrupt failed\n");
418 goto failed_req_irq_err;
/* Register block offsets differ per silicon revision. */
422 ipu_base = res->start;
424 if (g_ipu_hw_rev == 4) /* IPUv3H */
425 ipu_base += IPUV3H_REG_BASE;
426 else if (g_ipu_hw_rev == 3) /* IPUv3M */
427 ipu_base += IPUV3M_REG_BASE;
428 else /* IPUv3D, v3E, v3EX */
429 ipu_base += IPUV3DEX_REG_BASE;
/* Map each submodule register window separately. */
431 ipu->cm_reg = ioremap(ipu_base + IPU_CM_REG_BASE, PAGE_SIZE);
432 ipu->ic_reg = ioremap(ipu_base + IPU_IC_REG_BASE, PAGE_SIZE);
433 ipu->idmac_reg = ioremap(ipu_base + IPU_IDMAC_REG_BASE, PAGE_SIZE);
434 /* DP Registers are accessed thru the SRM */
435 ipu->dp_reg = ioremap(ipu_base + IPU_SRM_REG_BASE, PAGE_SIZE);
436 ipu->dc_reg = ioremap(ipu_base + IPU_DC_REG_BASE, PAGE_SIZE);
437 ipu->dmfc_reg = ioremap(ipu_base + IPU_DMFC_REG_BASE, PAGE_SIZE);
438 ipu->di_reg[0] = ioremap(ipu_base + IPU_DI0_REG_BASE, PAGE_SIZE);
439 ipu->di_reg[1] = ioremap(ipu_base + IPU_DI1_REG_BASE, PAGE_SIZE);
440 ipu->smfc_reg = ioremap(ipu_base + IPU_SMFC_REG_BASE, PAGE_SIZE);
441 ipu->csi_reg[0] = ioremap(ipu_base + IPU_CSI0_REG_BASE, PAGE_SIZE);
442 ipu->csi_reg[1] = ioremap(ipu_base + IPU_CSI1_REG_BASE, PAGE_SIZE);
443 ipu->cpmem_base = ioremap(ipu_base + IPU_CPMEM_REG_BASE, SZ_128K);
444 ipu->tpmem_base = ioremap(ipu_base + IPU_TPM_REG_BASE, SZ_64K);
445 ipu->dc_tmpl_reg = ioremap(ipu_base + IPU_DC_TMPL_REG_BASE, SZ_128K);
446 ipu->vdi_reg = ioremap(ipu_base + IPU_VDI_REG_BASE, PAGE_SIZE);
447 ipu->disp_base[1] = ioremap(ipu_base + IPU_DISP1_BASE, SZ_4K);
/* NOTE(review): ipu->vdi_reg is not part of this NULL check in the
 * visible lines -- confirm whether that is intentional. */
449 if (!ipu->cm_reg || !ipu->ic_reg || !ipu->idmac_reg ||
450 !ipu->dp_reg || !ipu->dc_reg || !ipu->dmfc_reg ||
451 !ipu->di_reg[0] || !ipu->di_reg[1] || !ipu->smfc_reg ||
452 !ipu->csi_reg[0] || !ipu->csi_reg[1] || !ipu->cpmem_base ||
453 !ipu->tpmem_base || !ipu->dc_tmpl_reg || !ipu->disp_base[1]
459 dev_dbg(ipu->dev, "IPU CM Regs = %p\n", ipu->cm_reg);
460 dev_dbg(ipu->dev, "IPU IC Regs = %p\n", ipu->ic_reg);
461 dev_dbg(ipu->dev, "IPU IDMAC Regs = %p\n", ipu->idmac_reg);
462 dev_dbg(ipu->dev, "IPU DP Regs = %p\n", ipu->dp_reg);
463 dev_dbg(ipu->dev, "IPU DC Regs = %p\n", ipu->dc_reg);
464 dev_dbg(ipu->dev, "IPU DMFC Regs = %p\n", ipu->dmfc_reg);
465 dev_dbg(ipu->dev, "IPU DI0 Regs = %p\n", ipu->di_reg[0]);
466 dev_dbg(ipu->dev, "IPU DI1 Regs = %p\n", ipu->di_reg[1]);
467 dev_dbg(ipu->dev, "IPU SMFC Regs = %p\n", ipu->smfc_reg);
468 dev_dbg(ipu->dev, "IPU CSI0 Regs = %p\n", ipu->csi_reg[0]);
469 dev_dbg(ipu->dev, "IPU CSI1 Regs = %p\n", ipu->csi_reg[1]);
470 dev_dbg(ipu->dev, "IPU CPMem = %p\n", ipu->cpmem_base);
471 dev_dbg(ipu->dev, "IPU TPMem = %p\n", ipu->tpmem_base);
472 dev_dbg(ipu->dev, "IPU DC Template Mem = %p\n", ipu->dc_tmpl_reg);
473 dev_dbg(ipu->dev, "IPU Display Region 1 Mem = %p\n", ipu->disp_base[1]);
474 dev_dbg(ipu->dev, "IPU VDI Regs = %p\n", ipu->vdi_reg);
476 ret = ipu_clk_setup_enable(ipu, pdev);
478 dev_err(ipu->dev, "ipu clk setup failed\n");
479 goto failed_clk_setup;
482 platform_set_drvdata(pdev, ipu);
488 /* Set sync refresh channels and CSI->mem channel as high priority */
489 ipu_idmac_write(ipu, 0x18800001L, IDMAC_CHA_PRI(0));
491 /* Set MCU_T to divide MCU access window into 2 */
492 ipu_cm_write(ipu, 0x00400000L | (IPU_MCU_T_DEFAULT << 18), IPU_DISP_GEN);
494 clk_disable(ipu->ipu_clk);
496 register_ipu_device(ipu, pdev->id);
/* Error unwind: unmap everything, then release irqs in reverse
 * order of acquisition. */
503 iounmap(ipu->cm_reg);
504 iounmap(ipu->ic_reg);
505 iounmap(ipu->idmac_reg);
506 iounmap(ipu->dc_reg);
507 iounmap(ipu->dp_reg);
508 iounmap(ipu->dmfc_reg);
509 iounmap(ipu->di_reg[0]);
510 iounmap(ipu->di_reg[1]);
511 iounmap(ipu->smfc_reg);
512 iounmap(ipu->csi_reg[0]);
513 iounmap(ipu->csi_reg[1]);
514 iounmap(ipu->cpmem_base);
515 iounmap(ipu->tpmem_base);
516 iounmap(ipu->dc_tmpl_reg);
517 iounmap(ipu->disp_base[1]);
518 iounmap(ipu->vdi_reg);
521 free_irq(ipu->irq_err, ipu);
523 free_irq(ipu->irq_sync, ipu);
529 int __devexit ipu_remove(struct platform_device *pdev)
/* Platform remove: unregister, release interrupts and clock, unmap
 * all register windows. Some guard/return lines are not visible in
 * this chunk. */
531 struct ipu_soc *ipu = platform_get_drvdata(pdev);
533 unregister_ipu_device(ipu, pdev->id);
536 free_irq(ipu->irq_sync, ipu);
538 free_irq(ipu->irq_err, ipu);
540 clk_put(ipu->ipu_clk);
542 iounmap(ipu->cm_reg);
543 iounmap(ipu->ic_reg);
544 iounmap(ipu->idmac_reg);
545 iounmap(ipu->dc_reg);
546 iounmap(ipu->dp_reg);
547 iounmap(ipu->dmfc_reg);
548 iounmap(ipu->di_reg[0]);
549 iounmap(ipu->di_reg[1]);
550 iounmap(ipu->smfc_reg);
551 iounmap(ipu->csi_reg[0]);
552 iounmap(ipu->csi_reg[1]);
553 iounmap(ipu->cpmem_base);
554 iounmap(ipu->tpmem_base);
555 iounmap(ipu->dc_tmpl_reg);
556 iounmap(ipu->disp_base[1]);
557 iounmap(ipu->vdi_reg);
562 void ipu_dump_registers(struct ipu_soc *ipu)
/* Dump the main IPU configuration/routing registers via dev_dbg for
 * debugging. TRB registers are dumped only on IPUv3EX and later
 * (g_ipu_hw_rev >= 2). */
564 dev_dbg(ipu->dev, "IPU_CONF = \t0x%08X\n", ipu_cm_read(ipu, IPU_CONF));
565 dev_dbg(ipu->dev, "IDMAC_CONF = \t0x%08X\n", ipu_idmac_read(ipu, IDMAC_CONF));
566 dev_dbg(ipu->dev, "IDMAC_CHA_EN1 = \t0x%08X\n",
567 ipu_idmac_read(ipu, IDMAC_CHA_EN(0)));
568 dev_dbg(ipu->dev, "IDMAC_CHA_EN2 = \t0x%08X\n",
569 ipu_idmac_read(ipu, IDMAC_CHA_EN(32)));
570 dev_dbg(ipu->dev, "IDMAC_CHA_PRI1 = \t0x%08X\n",
571 ipu_idmac_read(ipu, IDMAC_CHA_PRI(0)));
572 dev_dbg(ipu->dev, "IDMAC_CHA_PRI2 = \t0x%08X\n",
573 ipu_idmac_read(ipu, IDMAC_CHA_PRI(32)));
574 dev_dbg(ipu->dev, "IDMAC_BAND_EN1 = \t0x%08X\n",
575 ipu_idmac_read(ipu, IDMAC_BAND_EN(0)));
576 dev_dbg(ipu->dev, "IDMAC_BAND_EN2 = \t0x%08X\n",
577 ipu_idmac_read(ipu, IDMAC_BAND_EN(32)));
578 dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL0 = \t0x%08X\n",
579 ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(0)));
580 dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL1 = \t0x%08X\n",
581 ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(32)));
582 if (g_ipu_hw_rev >= 2) {
583 dev_dbg(ipu->dev, "IPU_CHA_TRB_MODE_SEL0 = \t0x%08X\n",
584 ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(0)));
585 dev_dbg(ipu->dev, "IPU_CHA_TRB_MODE_SEL1 = \t0x%08X\n",
586 ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(32)));
588 dev_dbg(ipu->dev, "DMFC_WR_CHAN = \t0x%08X\n",
589 ipu_dmfc_read(ipu, DMFC_WR_CHAN));
590 dev_dbg(ipu->dev, "DMFC_WR_CHAN_DEF = \t0x%08X\n",
591 ipu_dmfc_read(ipu, DMFC_WR_CHAN_DEF));
592 dev_dbg(ipu->dev, "DMFC_DP_CHAN = \t0x%08X\n",
593 ipu_dmfc_read(ipu, DMFC_DP_CHAN));
594 dev_dbg(ipu->dev, "DMFC_DP_CHAN_DEF = \t0x%08X\n",
595 ipu_dmfc_read(ipu, DMFC_DP_CHAN_DEF));
596 dev_dbg(ipu->dev, "DMFC_IC_CTRL = \t0x%08X\n",
597 ipu_dmfc_read(ipu, DMFC_IC_CTRL));
598 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW1 = \t0x%08X\n",
599 ipu_cm_read(ipu, IPU_FS_PROC_FLOW1));
600 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW2 = \t0x%08X\n",
601 ipu_cm_read(ipu, IPU_FS_PROC_FLOW2));
602 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW3 = \t0x%08X\n",
603 ipu_cm_read(ipu, IPU_FS_PROC_FLOW3));
604 dev_dbg(ipu->dev, "IPU_FS_DISP_FLOW1 = \t0x%08X\n",
605 ipu_cm_read(ipu, IPU_FS_DISP_FLOW1));
609 * This function is called to initialize a logical IPU channel.
611 * @param ipu ipu handler
612 * @param channel Input parameter for the logical channel ID to init.
614 * @param params Input parameter containing union of channel
615 * initialization parameters.
617 * @return Returns 0 on success or negative error code on fail
619 int32_t ipu_init_channel(struct ipu_soc *ipu, ipu_channel_t channel, ipu_channel_params_t *params)
/* Initialize one logical IPU channel: configure the submodules it
 * uses (CSI/SMFC/IC/VDI/DC/DP/DMFC) and bump the matching use
 * counters. Many switch-case labels, break statements and error
 * returns are not visible in this chunk. */
625 dev_dbg(ipu->dev, "init channel = %d\n", IPU_CHAN_ID(channel));
/* Each channel may only be initialized once. */
631 if (ipu->channel_init_mask & (1L << IPU_CHAN_ID(channel))) {
632 dev_warn(ipu->dev, "Warning: channel already initialized %d\n",
633 IPU_CHAN_ID(channel));
/* Accumulate module-enable bits locally; written back at the end. */
636 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
/* CSI -> SMFC -> memory capture path. */
643 if (params->csi_mem.csi > 1) {
648 if (params->csi_mem.interlaced)
649 ipu->chan_is_interlaced[channel_2_dma(channel,
650 IPU_OUTPUT_BUFFER)] = true;
652 ipu->chan_is_interlaced[channel_2_dma(channel,
653 IPU_OUTPUT_BUFFER)] = false;
655 ipu->smfc_use_count++;
656 ipu->csi_channel[params->csi_mem.csi] = channel;
/* MIPI vs parallel data source select per CSI instance. */
659 if (params->csi_mem.mipi_en) {
660 ipu_conf |= (1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
661 params->csi_mem.csi));
662 _ipu_smfc_init(ipu, channel, params->csi_mem.mipi_id,
663 params->csi_mem.csi);
665 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
666 params->csi_mem.csi));
667 _ipu_smfc_init(ipu, channel, 0, params->csi_mem.csi);
670 /*CSI data (include compander) dest*/
671 _ipu_csi_init(ipu, channel, params->csi_mem.csi);
673 case CSI_PRP_ENC_MEM:
674 if (params->csi_prp_enc_mem.csi > 1) {
/* The IC direct input is exclusive with the VDI path. */
678 if (ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) {
682 ipu->using_ic_dirct_ch = CSI_PRP_ENC_MEM;
685 ipu->csi_channel[params->csi_prp_enc_mem.csi] = channel;
687 /*Without SMFC, CSI only support parallel data source*/
688 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
689 params->csi_prp_enc_mem.csi));
691 /*CSI0/1 feed into IC*/
692 ipu_conf &= ~IPU_CONF_IC_INPUT;
693 if (params->csi_prp_enc_mem.csi)
694 ipu_conf |= IPU_CONF_CSI_SEL;
696 ipu_conf &= ~IPU_CONF_CSI_SEL;
698 /*PRP skip buffer in memory, only valid when RWS_EN is true*/
699 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
700 ipu_cm_write(ipu, reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
702 /*CSI data (include compander) dest*/
703 _ipu_csi_init(ipu, channel, params->csi_prp_enc_mem.csi);
704 _ipu_ic_init_prpenc(ipu, params, true);
/* CSI_PRP_VF_MEM: same pattern as the ENC path above, for VF. */
707 if (params->csi_prp_vf_mem.csi > 1) {
711 if (ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) {
715 ipu->using_ic_dirct_ch = CSI_PRP_VF_MEM;
718 ipu->csi_channel[params->csi_prp_vf_mem.csi] = channel;
720 /*Without SMFC, CSI only support parallel data source*/
721 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
722 params->csi_prp_vf_mem.csi));
724 /*CSI0/1 feed into IC*/
725 ipu_conf &= ~IPU_CONF_IC_INPUT;
726 if (params->csi_prp_vf_mem.csi)
727 ipu_conf |= IPU_CONF_CSI_SEL;
729 ipu_conf &= ~IPU_CONF_CSI_SEL;
731 /*PRP skip buffer in memory, only valid when RWS_EN is true*/
732 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
733 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
735 /*CSI data (include compander) dest*/
736 _ipu_csi_init(ipu, channel, params->csi_prp_vf_mem.csi);
737 _ipu_ic_init_prpvf(ipu, params, true);
/* Memory-fed PRP VF path. */
741 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
742 ipu_cm_write(ipu, reg | FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
744 if (params->mem_prp_vf_mem.graphics_combine_en)
745 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
746 if (params->mem_prp_vf_mem.alpha_chan_en)
747 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
749 _ipu_ic_init_prpvf(ipu, params, false);
751 case MEM_VDI_PRP_VF_MEM:
752 if ((ipu->using_ic_dirct_ch == CSI_PRP_VF_MEM) ||
753 (ipu->using_ic_dirct_ch == CSI_PRP_ENC_MEM)) {
757 ipu->using_ic_dirct_ch = MEM_VDI_PRP_VF_MEM;
759 ipu->vdi_use_count++;
760 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
761 reg &= ~FS_VDI_SRC_SEL_MASK;
762 ipu_cm_write(ipu, reg , IPU_FS_PROC_FLOW1);
764 if (params->mem_prp_vf_mem.graphics_combine_en)
765 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
766 _ipu_ic_init_prpvf(ipu, params, false);
767 _ipu_vdi_init(ipu, channel, params);
/* Previous/next field channels of the de-interlacer. */
769 case MEM_VDI_PRP_VF_MEM_P:
770 _ipu_vdi_init(ipu, channel, params);
772 case MEM_VDI_PRP_VF_MEM_N:
773 _ipu_vdi_init(ipu, channel, params);
777 ipu->rot_use_count++;
778 _ipu_ic_init_rotate_vf(ipu, params);
780 case MEM_PRP_ENC_MEM:
782 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
783 ipu_cm_write(ipu, reg | FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
784 _ipu_ic_init_prpenc(ipu, params, false);
786 case MEM_ROT_ENC_MEM:
788 ipu->rot_use_count++;
789 _ipu_ic_init_rotate_enc(ipu, params);
/* Post-processing path. */
792 if (params->mem_pp_mem.graphics_combine_en)
793 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
794 if (params->mem_pp_mem.alpha_chan_en)
795 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
796 _ipu_ic_init_pp(ipu, params);
800 _ipu_ic_init_rotate_pp(ipu, params);
802 ipu->rot_use_count++;
/* Display paths: DC sync (DC microcode slot 1). */
805 if (params->mem_dc_sync.di > 1) {
810 ipu->dc_di_assignment[1] = params->mem_dc_sync.di;
811 _ipu_dc_init(ipu, 1, params->mem_dc_sync.di,
812 params->mem_dc_sync.interlaced,
813 params->mem_dc_sync.out_pixel_fmt);
814 ipu->di_use_count[params->mem_dc_sync.di]++;
816 ipu->dmfc_use_count++;
/* DP background sync flow (DC microcode slot 5). */
819 if (params->mem_dp_bg_sync.di > 1) {
824 if (params->mem_dp_bg_sync.alpha_chan_en)
825 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
827 ipu->dc_di_assignment[5] = params->mem_dp_bg_sync.di;
828 _ipu_dp_init(ipu, channel, params->mem_dp_bg_sync.in_pixel_fmt,
829 params->mem_dp_bg_sync.out_pixel_fmt);
830 _ipu_dc_init(ipu, 5, params->mem_dp_bg_sync.di,
831 params->mem_dp_bg_sync.interlaced,
832 params->mem_dp_bg_sync.out_pixel_fmt);
833 ipu->di_use_count[params->mem_dp_bg_sync.di]++;
836 ipu->dmfc_use_count++;
/* DP foreground sync flow shares the BG display interface. */
839 _ipu_dp_init(ipu, channel, params->mem_dp_fg_sync.in_pixel_fmt,
840 params->mem_dp_fg_sync.out_pixel_fmt);
842 if (params->mem_dp_fg_sync.alpha_chan_en)
843 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
847 ipu->dmfc_use_count++;
/* Direct async display flows (DC microcode slots 8 and 9). */
850 if (params->direct_async.di > 1) {
855 ipu->dc_di_assignment[8] = params->direct_async.di;
856 _ipu_dc_init(ipu, 8, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
857 ipu->di_use_count[params->direct_async.di]++;
861 if (params->direct_async.di > 1) {
866 ipu->dc_di_assignment[9] = params->direct_async.di;
867 _ipu_dc_init(ipu, 9, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
868 ipu->di_use_count[params->direct_async.di]++;
872 dev_err(ipu->dev, "Missing channel initialization\n");
/* Mark the channel initialized and commit the enable bits. */
876 ipu->channel_init_mask |= 1L << IPU_CHAN_ID(channel);
878 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
884 EXPORT_SYMBOL(ipu_init_channel);
887 * This function is called to uninitialize a logical IPU channel.
889 * @param ipu ipu handler
890 * @param channel Input parameter for the logical channel ID to uninit.
892 void ipu_uninit_channel(struct ipu_soc *ipu, ipu_channel_t channel)
/* Tear down one logical IPU channel: undo the submodule setup done
 * by ipu_init_channel(), drop use counters, and gate off any module
 * that no longer has users. Several case labels, breaks and closing
 * braces are not visible in this chunk. */
895 uint32_t in_dma, out_dma = 0;
/* Refuse to uninit a channel that was never initialized. */
900 if ((ipu->channel_init_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
901 dev_err(ipu->dev, "Channel already uninitialized %d\n",
902 IPU_CHAN_ID(channel));
907 /* Make sure channel is disabled */
908 /* Get input and output dma channels */
909 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
910 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
912 if (idma_is_set(ipu, IDMAC_CHA_EN, in_dma) ||
913 idma_is_set(ipu, IDMAC_CHA_EN, out_dma)) {
915 "Channel %d is not disabled, disable first\n",
916 IPU_CHAN_ID(channel));
921 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
923 /* Reset the double buffer */
924 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(in_dma));
925 ipu_cm_write(ipu, reg & ~idma_mask(in_dma), IPU_CHA_DB_MODE_SEL(in_dma));
926 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(out_dma));
927 ipu_cm_write(ipu, reg & ~idma_mask(out_dma), IPU_CHA_DB_MODE_SEL(out_dma));
929 /* Reset the triple buffer */
930 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(in_dma));
931 ipu_cm_write(ipu, reg & ~idma_mask(in_dma), IPU_CHA_TRB_MODE_SEL(in_dma));
932 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(out_dma));
933 ipu_cm_write(ipu, reg & ~idma_mask(out_dma), IPU_CHA_TRB_MODE_SEL(out_dma));
/* Clear the combine/alpha flags for graphic-capable channels. */
935 if (_ipu_is_ic_chan(in_dma) || _ipu_is_dp_graphic_chan(in_dma)) {
936 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = false;
937 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = false;
/* Per-channel-type teardown; mirrors ipu_init_channel(). */
945 ipu->smfc_use_count--;
946 if (ipu->csi_channel[0] == channel) {
947 ipu->csi_channel[0] = CHAN_NONE;
948 } else if (ipu->csi_channel[1] == channel) {
949 ipu->csi_channel[1] = CHAN_NONE;
952 case CSI_PRP_ENC_MEM:
954 if (ipu->using_ic_dirct_ch == CSI_PRP_ENC_MEM)
955 ipu->using_ic_dirct_ch = 0;
956 _ipu_ic_uninit_prpenc(ipu);
957 if (ipu->csi_channel[0] == channel) {
958 ipu->csi_channel[0] = CHAN_NONE;
959 } else if (ipu->csi_channel[1] == channel) {
960 ipu->csi_channel[1] = CHAN_NONE;
965 if (ipu->using_ic_dirct_ch == CSI_PRP_VF_MEM)
966 ipu->using_ic_dirct_ch = 0;
967 _ipu_ic_uninit_prpvf(ipu);
968 if (ipu->csi_channel[0] == channel) {
969 ipu->csi_channel[0] = CHAN_NONE;
970 } else if (ipu->csi_channel[1] == channel) {
971 ipu->csi_channel[1] = CHAN_NONE;
976 _ipu_ic_uninit_prpvf(ipu);
977 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
978 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
980 case MEM_VDI_PRP_VF_MEM:
982 ipu->vdi_use_count--;
983 if (ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM)
984 ipu->using_ic_dirct_ch = 0;
985 _ipu_ic_uninit_prpvf(ipu);
986 _ipu_vdi_uninit(ipu);
987 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
988 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
990 case MEM_VDI_PRP_VF_MEM_P:
991 case MEM_VDI_PRP_VF_MEM_N:
994 ipu->rot_use_count--;
996 _ipu_ic_uninit_rotate_vf(ipu);
998 case MEM_PRP_ENC_MEM:
1000 _ipu_ic_uninit_prpenc(ipu);
1001 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1002 ipu_cm_write(ipu, reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
1004 case MEM_ROT_ENC_MEM:
1005 ipu->rot_use_count--;
1006 ipu->ic_use_count--;
1007 _ipu_ic_uninit_rotate_enc(ipu);
1010 ipu->ic_use_count--;
1011 _ipu_ic_uninit_pp(ipu);
1013 case MEM_ROT_PP_MEM:
1014 ipu->rot_use_count--;
1015 ipu->ic_use_count--;
1016 _ipu_ic_uninit_rotate_pp(ipu);
/* Display teardown: DC slot 1, DP BG (slot 5), DP FG, async 8/9. */
1019 _ipu_dc_uninit(ipu, 1);
1020 ipu->di_use_count[ipu->dc_di_assignment[1]]--;
1021 ipu->dc_use_count--;
1022 ipu->dmfc_use_count--;
1025 _ipu_dp_uninit(ipu, channel);
1026 _ipu_dc_uninit(ipu, 5);
1027 ipu->di_use_count[ipu->dc_di_assignment[5]]--;
1028 ipu->dc_use_count--;
1029 ipu->dp_use_count--;
1030 ipu->dmfc_use_count--;
1033 _ipu_dp_uninit(ipu, channel);
1034 ipu->dc_use_count--;
1035 ipu->dp_use_count--;
1036 ipu->dmfc_use_count--;
1039 _ipu_dc_uninit(ipu, 8);
1040 ipu->di_use_count[ipu->dc_di_assignment[8]]--;
1041 ipu->dc_use_count--;
1044 _ipu_dc_uninit(ipu, 9);
1045 ipu->di_use_count[ipu->dc_di_assignment[9]]--;
1046 ipu->dc_use_count--;
/* Gate off every submodule whose use count dropped to zero. */
1052 if (ipu->ic_use_count == 0)
1053 ipu_conf &= ~IPU_CONF_IC_EN;
1054 if (ipu->vdi_use_count == 0) {
1055 ipu_conf &= ~IPU_CONF_ISP_EN;
1056 ipu_conf &= ~IPU_CONF_VDI_EN;
1057 ipu_conf &= ~IPU_CONF_IC_INPUT;
1059 if (ipu->rot_use_count == 0)
1060 ipu_conf &= ~IPU_CONF_ROT_EN;
1061 if (ipu->dc_use_count == 0)
1062 ipu_conf &= ~IPU_CONF_DC_EN;
1063 if (ipu->dp_use_count == 0)
1064 ipu_conf &= ~IPU_CONF_DP_EN;
1065 if (ipu->dmfc_use_count == 0)
1066 ipu_conf &= ~IPU_CONF_DMFC_EN;
1067 if (ipu->di_use_count[0] == 0) {
1068 ipu_conf &= ~IPU_CONF_DI0_EN;
1070 if (ipu->di_use_count[1] == 0) {
1071 ipu_conf &= ~IPU_CONF_DI1_EN;
1073 if (ipu->smfc_use_count == 0)
1074 ipu_conf &= ~IPU_CONF_SMFC_EN;
1076 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
1078 ipu->channel_init_mask &= ~(1L << IPU_CHAN_ID(channel));
/* Use counts must never go negative; that indicates unbalanced
 * init/uninit calls. */
1084 WARN_ON(ipu->ic_use_count < 0);
1085 WARN_ON(ipu->vdi_use_count < 0);
1086 WARN_ON(ipu->rot_use_count < 0);
1087 WARN_ON(ipu->dc_use_count < 0);
1088 WARN_ON(ipu->dp_use_count < 0);
1089 WARN_ON(ipu->dmfc_use_count < 0);
1090 WARN_ON(ipu->smfc_use_count < 0);
1092 EXPORT_SYMBOL(ipu_uninit_channel);
1095 * This function is called to initialize buffer(s) for logical IPU channel.
1097 * @param ipu ipu handler
1099 * @param channel Input parameter for the logical channel ID.
1101 * @param type Input parameter which buffer to initialize.
1103 * @param pixel_fmt Input parameter for pixel format of buffer.
1104 * Pixel format is a FOURCC ASCII code.
1106 * @param width Input parameter for width of buffer in pixels.
1108 * @param height Input parameter for height of buffer in pixels.
1110 * @param stride Input parameter for stride length of buffer
1113 * @param rot_mode Input parameter for rotation setting of buffer.
1114 * A rotation setting other than
1115 * IPU_ROTATE_VERT_FLIP
1116 * should only be used for input buffers of
1117 * rotation channels.
1119 * @param phyaddr_0 Input parameter buffer 0 physical address.
1121 * @param phyaddr_1 Input parameter buffer 1 physical address.
1122 * Setting this to a value other than NULL enables
1123 * double buffering mode.
1125 * @param phyaddr_2 Input parameter buffer 2 physical address.
1126 * Setting this to a value other than NULL enables
1127 * triple buffering mode, phyaddr_1 should not be
1130 * @param u private u offset for additional cropping,
1133 * @param v private v offset for additional cropping,
1136 * @return Returns 0 on success or negative error code on fail
1138 int32_t ipu_init_channel_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1141 uint16_t width, uint16_t height,
1143 ipu_rotate_mode_t rot_mode,
1144 dma_addr_t phyaddr_0, dma_addr_t phyaddr_1,
1145 dma_addr_t phyaddr_2,
1146 uint32_t u, uint32_t v)
1150 uint32_t burst_size;
1152 dma_chan = channel_2_dma(channel, type);
1153 if (!idma_is_valid(dma_chan))
1156 if (stride < width * bytes_per_pixel(pixel_fmt))
1157 stride = width * bytes_per_pixel(pixel_fmt);
1161 "Stride not 32-bit aligned, stride = %d\n", stride);
1164 /* IC & IRT channels' width must be multiple of 8 pixels */
1165 if ((_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan))
1167 dev_err(ipu->dev, "Width must be 8 pixel multiple\n");
1171 /* IPUv3EX and IPUv3M support triple buffer */
1172 if ((!_ipu_is_trb_chan(dma_chan)) && phyaddr_2) {
1173 dev_err(ipu->dev, "Chan%d doesn't support triple buffer "
1174 "mode\n", dma_chan);
1177 if (!phyaddr_1 && phyaddr_2) {
1178 dev_err(ipu->dev, "Chan%d's buf1 physical addr is NULL for "
1179 "triple buffer mode\n", dma_chan);
1185 /* Build parameter memory data for DMA channel */
1186 _ipu_ch_param_init(ipu, dma_chan, pixel_fmt, width, height, stride, u, v, 0,
1187 phyaddr_0, phyaddr_1, phyaddr_2);
1189 /* Set correlative channel parameter of local alpha channel */
1190 if ((_ipu_is_ic_graphic_chan(dma_chan) ||
1191 _ipu_is_dp_graphic_chan(dma_chan)) &&
1192 (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] == true)) {
1193 _ipu_ch_param_set_alpha_use_separate_channel(ipu, dma_chan, true);
1194 _ipu_ch_param_set_alpha_buffer_memory(ipu, dma_chan);
1195 _ipu_ch_param_set_alpha_condition_read(ipu, dma_chan);
1196 /* fix alpha width as 8 and burst size as 16*/
1197 _ipu_ch_params_set_alpha_width(ipu, dma_chan, 8);
1198 _ipu_ch_param_set_burst_size(ipu, dma_chan, 16);
1199 } else if (_ipu_is_ic_graphic_chan(dma_chan) &&
1200 ipu_pixel_format_has_alpha(pixel_fmt))
1201 _ipu_ch_param_set_alpha_use_separate_channel(ipu, dma_chan, false);
1204 _ipu_ch_param_set_rotation(ipu, dma_chan, rot_mode);
1206 /* IC and ROT channels have restriction of 8 or 16 pix burst length */
1207 if (_ipu_is_ic_chan(dma_chan)) {
1208 if ((width % 16) == 0)
1209 _ipu_ch_param_set_burst_size(ipu, dma_chan, 16);
1211 _ipu_ch_param_set_burst_size(ipu, dma_chan, 8);
1212 } else if (_ipu_is_irt_chan(dma_chan)) {
1213 _ipu_ch_param_set_burst_size(ipu, dma_chan, 8);
1214 _ipu_ch_param_set_block_mode(ipu, dma_chan);
1215 } else if (_ipu_is_dmfc_chan(dma_chan)) {
1216 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1217 _ipu_dmfc_set_wait4eot(ipu, dma_chan, width);
1218 _ipu_dmfc_set_burst_size(ipu, dma_chan, burst_size);
1221 if (_ipu_disp_chan_is_interlaced(ipu, channel) ||
1222 ipu->chan_is_interlaced[dma_chan])
1223 _ipu_ch_param_set_interlaced_scan(ipu, dma_chan);
1225 if (_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan)) {
1226 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1227 _ipu_ic_idma_init(ipu, dma_chan, width, height, burst_size,
1229 } else if (_ipu_is_smfc_chan(dma_chan)) {
1230 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1231 if ((pixel_fmt == IPU_PIX_FMT_GENERIC) &&
1232 ((_ipu_ch_param_get_bpp(ipu, dma_chan) == 5) ||
1233 (_ipu_ch_param_get_bpp(ipu, dma_chan) == 3)))
1234 burst_size = burst_size >> 4;
1236 burst_size = burst_size >> 2;
1237 _ipu_smfc_set_burst_size(ipu, channel, burst_size-1);
1241 if (idma_is_set(ipu, IDMAC_CHA_PRI, dma_chan)) {
1242 unsigned reg = IDMAC_CH_LOCK_EN_1;
1244 if (cpu_is_mx53() || cpu_is_mx6q()) {
1245 _ipu_ch_param_set_axi_id(ipu, dma_chan, 0);
1281 reg = IDMAC_CH_LOCK_EN_2;
1285 reg = IDMAC_CH_LOCK_EN_2;
1289 reg = IDMAC_CH_LOCK_EN_2;
1293 reg = IDMAC_CH_LOCK_EN_2;
1297 reg = IDMAC_CH_LOCK_EN_2;
1301 reg = IDMAC_CH_LOCK_EN_2;
1307 value |= ipu_idmac_read(ipu, reg);
1308 ipu_idmac_write(ipu, value, reg);
1310 _ipu_ch_param_set_axi_id(ipu, dma_chan, 1);
1313 _ipu_ch_param_set_axi_id(ipu, dma_chan, 1);
1316 _ipu_ch_param_dump(ipu, dma_chan);
1318 if (phyaddr_2 && g_ipu_hw_rev >= 2) {
1319 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(dma_chan));
1320 reg &= ~idma_mask(dma_chan);
1321 ipu_cm_write(ipu, reg, IPU_CHA_DB_MODE_SEL(dma_chan));
1323 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
1324 reg |= idma_mask(dma_chan);
1325 ipu_cm_write(ipu, reg, IPU_CHA_TRB_MODE_SEL(dma_chan));
1327 /* Set IDMAC third buffer's cpmem number */
1328 /* See __ipu_ch_get_third_buf_cpmem_num() for mapping */
1329 ipu_idmac_write(ipu, 0x00444047L, IDMAC_SUB_ADDR_4);
1330 ipu_idmac_write(ipu, 0x46004241L, IDMAC_SUB_ADDR_3);
1331 ipu_idmac_write(ipu, 0x00000045L, IDMAC_SUB_ADDR_1);
1333 /* Reset to buffer 0 */
1334 ipu_cm_write(ipu, tri_cur_buf_mask(dma_chan),
1335 IPU_CHA_TRIPLE_CUR_BUF(dma_chan));
1337 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
1338 reg &= ~idma_mask(dma_chan);
1339 ipu_cm_write(ipu, reg, IPU_CHA_TRB_MODE_SEL(dma_chan));
1341 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(dma_chan));
1343 reg |= idma_mask(dma_chan);
1345 reg &= ~idma_mask(dma_chan);
1346 ipu_cm_write(ipu, reg, IPU_CHA_DB_MODE_SEL(dma_chan));
1348 /* Reset to buffer 0 */
1349 ipu_cm_write(ipu, idma_mask(dma_chan),
1350 IPU_CHA_CUR_BUF(dma_chan));
1358 EXPORT_SYMBOL(ipu_init_channel_buffer);
1361 * This function is called to update the physical address of a buffer for
1362 * a logical IPU channel.
1364 * @param ipu ipu handler
1365 * @param channel Input parameter for the logical channel ID.
1367 * @param type Input parameter which buffer to initialize.
1369 * @param bufNum Input parameter for buffer number to update.
1370 * 0 or 1 are the only valid values.
1372 * @param phyaddr Input parameter buffer physical address.
1374 * @return This function returns 0 on success or negative error code on
1375 * fail. This function will fail if the buffer is set to ready.
/*
 * ipu_update_channel_buffer() - update the physical address of buffer
 * bufNum (0, 1 or 2) for the DMA channel mapped from (channel, type).
 * The new address is written to channel parameter memory only when the
 * corresponding IPU_CHA_BUFn_RDY bit is clear, i.e. that buffer is not
 * currently queued to the IDMAC.
 * NOTE(review): this extracted fragment is missing lines (braces,
 * locking, error returns); comments describe only the visible code.
 */
1377 int32_t ipu_update_channel_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1378 ipu_buffer_t type, uint32_t bufNum, dma_addr_t phyaddr)
1382 uint32_t dma_chan = channel_2_dma(channel, type);
1384 if (dma_chan == IDMA_CHAN_INVALID)
/* Read the ready register that matches the requested buffer number. */
1390 reg = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan));
1391 else if (bufNum == 1)
1392 reg = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan));
1394 reg = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan));
/* Only touch parameter memory while the buffer is not marked ready. */
1396 if ((reg & idma_mask(dma_chan)) == 0)
1397 _ipu_ch_param_set_buffer(ipu, dma_chan, bufNum, phyaddr);
1405 EXPORT_SYMBOL(ipu_update_channel_buffer);
1409 * This function is called to initialize a buffer for logical IPU channel.
1411 * @param ipu ipu handler
1412 * @param channel Input parameter for the logical channel ID.
1414 * @param type Input parameter which buffer to initialize.
1416 * @param pixel_fmt Input parameter for pixel format of buffer.
1417 * Pixel format is a FOURCC ASCII code.
1419 * @param width Input parameter for width of buffer in pixels.
1421 * @param height Input parameter for height of buffer in pixels.
1423 * @param stride Input parameter for stride length of buffer
1426 * @param u predefined private u offset for additional cropping,
1429 * @param v predefined private v offset for additional cropping,
1432 * @param vertical_offset vertical offset for Y coordinate
1433 * in the existed frame
1436 * @param horizontal_offset horizontal offset for X coordinate
1437 * in the existed frame
1440 * @return Returns 0 on success or negative error code on fail
1441 * This function will fail if any buffer is set to ready.
/*
 * ipu_update_channel_offset() - re-crop an already initialized channel by
 * updating its vertical/horizontal offsets in channel parameter memory.
 * The update is rejected while buffer 0 or 1 is ready, or while buffer 2
 * is ready on a channel that is both triple-buffer capable and currently
 * in TRB mode.
 * NOTE(review): fragment is missing lines (braces, return paths);
 * comments describe only the visible code.
 */
1444 int32_t ipu_update_channel_offset(struct ipu_soc *ipu,
1445 ipu_channel_t channel, ipu_buffer_t type,
1447 uint16_t width, uint16_t height,
1449 uint32_t u, uint32_t v,
1450 uint32_t vertical_offset, uint32_t horizontal_offset)
1453 uint32_t dma_chan = channel_2_dma(channel, type);
1455 if (dma_chan == IDMA_CHAN_INVALID)
/* Refuse the update while any relevant buffer is queued to the IDMAC. */
1460 if ((ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1461 (ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1462 ((ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan)) & idma_mask(dma_chan)) &&
1463 (ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan)) & idma_mask(dma_chan)) &&
1464 _ipu_is_trb_chan(dma_chan)))
1467 _ipu_ch_offset_update(ipu, dma_chan, pixel_fmt, width, height, stride,
1468 u, v, 0, vertical_offset, horizontal_offset);
1473 EXPORT_SYMBOL(ipu_update_channel_offset);
1477 * This function is called to set a channel's buffer as ready.
1479 * @param ipu ipu handler
1480 * @param channel Input parameter for the logical channel ID.
1482 * @param type Input parameter which buffer to initialize.
1484 * @param bufNum Input parameter for which buffer number set to
1487 * @return Returns 0 on success or negative error code on fail
/*
 * ipu_select_buffer() - hand buffer bufNum (0, 1 or 2) of a logical
 * channel to the IDMAC by setting the matching IPU_CHA_BUFn_RDY bit.
 * NOTE(review): fragment is missing lines (braces, validation/return
 * paths); comments describe only the visible code.
 */
1489 int32_t ipu_select_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1490 ipu_buffer_t type, uint32_t bufNum)
1492 uint32_t dma_chan = channel_2_dma(channel, type);
1494 if (dma_chan == IDMA_CHAN_INVALID)
1497 /* Mark buffer to be ready. */
1500 ipu_cm_write(ipu, idma_mask(dma_chan),
1501 IPU_CHA_BUF0_RDY(dma_chan));
1502 else if (bufNum == 1)
1503 ipu_cm_write(ipu, idma_mask(dma_chan),
1504 IPU_CHA_BUF1_RDY(dma_chan));
1506 ipu_cm_write(ipu, idma_mask(dma_chan),
1507 IPU_CHA_BUF2_RDY(dma_chan));
1511 EXPORT_SYMBOL(ipu_select_buffer);
1514 * This function is called to set the multi-VDI channels' buffers as ready.
1516 * @param ipu ipu handler
1517 * @param bufNum Input parameter for which buffer number set to
1520 * @return Returns 0 on success or negative error code on fail
/*
 * ipu_select_multi_vdi_buffer() - mark buffer bufNum ready for all three
 * VDI de-interlace input channels at once (previous / current / next
 * field: MEM_VDI_PRP_VF_MEM_P, MEM_VDI_PRP_VF_MEM, MEM_VDI_PRP_VF_MEM_N)
 * by writing a combined mask into the shared BUFn_RDY register.
 * NOTE(review): fragment is missing lines; comments describe only the
 * visible code.
 */
1522 int32_t ipu_select_multi_vdi_buffer(struct ipu_soc *ipu, uint32_t bufNum)
1525 uint32_t dma_chan = channel_2_dma(MEM_VDI_PRP_VF_MEM, IPU_INPUT_BUFFER);
/* Combined ready mask covering the P/current/N field input channels. */
1527 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_P, IPU_INPUT_BUFFER))|
1528 idma_mask(dma_chan)|
1529 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_N, IPU_INPUT_BUFFER));
1531 /* Mark buffers to be ready. */
1534 ipu_cm_write(ipu, mask_bit, IPU_CHA_BUF0_RDY(dma_chan));
1536 ipu_cm_write(ipu, mask_bit, IPU_CHA_BUF1_RDY(dma_chan));
1540 EXPORT_SYMBOL(ipu_select_multi_vdi_buffer);
/*
 * Frame Synchronization Unit (FSU) mux selection tables, indexed by
 * IPU_CHAN_ID() of the linked channel (see ipu_link_channels() below).
 * proc_dest_sel: value for the *_DEST_SEL fields of the processing flow
 * registers; proc_src_sel / disp_src_sel: values for the *_SRC_SEL
 * fields of the processing/display flow registers.  NA marks channels
 * that cannot act as a source for that flow.
 * NOTE(review): the exact meaning of each encoded value comes from the
 * IPU_FS_PROC_FLOWn/IPU_FS_DISP_FLOW1 register layout — confirm against
 * the i.MX reference manual.
 */
1543 static int proc_dest_sel[] = {
1544 0, 1, 1, 3, 5, 5, 4, 7, 8, 9, 10, 11, 12, 14, 15, 16,
1545 0, 1, 1, 5, 5, 5, 5, 5, 7, 8, 9, 10, 11, 12, 14, 31 };
1546 static int proc_src_sel[] = { 0, 6, 7, 6, 7, 8, 5, NA, NA, NA,
1547 NA, NA, NA, NA, NA, 1, 2, 3, 4, 7, 8, NA, 8, NA };
1548 static int disp_src_sel[] = { 0, 6, 7, 8, 3, 4, 5, NA, NA, NA,
1549 NA, NA, NA, NA, NA, 1, NA, 2, NA, 3, 4, 4, 4, 4 };
1553 * This function links 2 channels together for automatic frame
1554 * synchronization. The output of the source channel is linked to the input of
1555 * the destination channel.
1557 * @param ipu ipu handler
1558 * @param src_ch Input parameter for the logical channel ID of
1559 * the source channel.
1561 * @param dest_ch Input parameter for the logical channel ID of
1562 * the destination channel.
1564 * @return This function returns 0 on success or negative error code on
/*
 * ipu_link_channels() - link the output of src_ch to the input of
 * dest_ch for automatic frame synchronization.  Reads the four FSU flow
 * registers, patches the DEST_SEL field selected by src_ch and the
 * SRC_SEL field selected by dest_ch (using the proc_dest_sel /
 * proc_src_sel / disp_src_sel tables), then writes all four back.
 * NOTE(review): the switch case labels and locking are missing from this
 * extracted fragment; comments describe only the visible statements.
 */
1567 int32_t ipu_link_channels(struct ipu_soc *ipu, ipu_channel_t src_ch, ipu_channel_t dest_ch)
1570 uint32_t fs_proc_flow1;
1571 uint32_t fs_proc_flow2;
1572 uint32_t fs_proc_flow3;
1573 uint32_t fs_disp_flow1;
/* Snapshot all four frame-synchronization flow registers. */
1577 fs_proc_flow1 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1578 fs_proc_flow2 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW2);
1579 fs_proc_flow3 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW3);
1580 fs_disp_flow1 = ipu_cm_read(ipu, IPU_FS_DISP_FLOW1);
/* Per-src_ch: clear and reprogram the destination-select field. */
1584 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1586 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1587 FS_SMFC0_DEST_SEL_OFFSET;
1590 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1592 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1593 FS_SMFC1_DEST_SEL_OFFSET;
1596 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1598 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1599 FS_SMFC2_DEST_SEL_OFFSET;
1602 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1604 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1605 FS_SMFC3_DEST_SEL_OFFSET;
1607 case CSI_PRP_ENC_MEM:
1608 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1610 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1611 FS_PRPENC_DEST_SEL_OFFSET;
1613 case CSI_PRP_VF_MEM:
1614 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1616 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1617 FS_PRPVF_DEST_SEL_OFFSET;
1620 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1622 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1623 FS_PP_DEST_SEL_OFFSET;
1625 case MEM_ROT_PP_MEM:
1626 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1628 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1629 FS_PP_ROT_DEST_SEL_OFFSET;
1631 case MEM_PRP_ENC_MEM:
1632 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1634 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1635 FS_PRPENC_DEST_SEL_OFFSET;
1637 case MEM_ROT_ENC_MEM:
1638 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1640 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1641 FS_PRPENC_ROT_DEST_SEL_OFFSET;
1643 case MEM_PRP_VF_MEM:
1644 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1646 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1647 FS_PRPVF_DEST_SEL_OFFSET;
1649 case MEM_VDI_PRP_VF_MEM:
1650 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1652 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1653 FS_PRPVF_DEST_SEL_OFFSET;
1655 case MEM_ROT_VF_MEM:
1656 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
1658 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1659 FS_PRPVF_ROT_DEST_SEL_OFFSET;
/* Per-dest_ch: clear and reprogram the source-select field. */
1668 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1670 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PP_SRC_SEL_OFFSET;
1672 case MEM_ROT_PP_MEM:
1673 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1675 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1676 FS_PP_ROT_SRC_SEL_OFFSET;
1678 case MEM_PRP_ENC_MEM:
1679 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1681 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1683 case MEM_ROT_ENC_MEM:
1684 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1686 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1687 FS_PRPENC_ROT_SRC_SEL_OFFSET;
1689 case MEM_PRP_VF_MEM:
1690 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1692 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1694 case MEM_VDI_PRP_VF_MEM:
1695 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1697 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1699 case MEM_ROT_VF_MEM:
1700 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1702 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1703 FS_PRPVF_ROT_SRC_SEL_OFFSET;
/* Display destinations use the disp_src_sel table instead. */
1706 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1708 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC1_SRC_SEL_OFFSET;
1711 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1713 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1714 FS_DP_SYNC0_SRC_SEL_OFFSET;
1717 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1719 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1720 FS_DP_SYNC1_SRC_SEL_OFFSET;
1723 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1725 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC2_SRC_SEL_OFFSET;
1728 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1730 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1731 FS_DP_ASYNC0_SRC_SEL_OFFSET;
1734 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
1736 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1737 FS_DP_ASYNC1_SRC_SEL_OFFSET;
/* Commit all four flow registers. */
1744 ipu_cm_write(ipu, fs_proc_flow1, IPU_FS_PROC_FLOW1);
1745 ipu_cm_write(ipu, fs_proc_flow2, IPU_FS_PROC_FLOW2);
1746 ipu_cm_write(ipu, fs_proc_flow3, IPU_FS_PROC_FLOW3);
1747 ipu_cm_write(ipu, fs_disp_flow1, IPU_FS_DISP_FLOW1);
1753 EXPORT_SYMBOL(ipu_link_channels);
1756 * This function unlinks 2 channels and disables automatic frame
1759 * @param ipu ipu handler
1760 * @param src_ch Input parameter for the logical channel ID of
1761 * the source channel.
1763 * @param dest_ch Input parameter for the logical channel ID of
1764 * the destination channel.
1766 * @return This function returns 0 on success or negative error code on
/*
 * ipu_unlink_channels() - undo ipu_link_channels(): clear the DEST_SEL
 * field selected by src_ch and the SRC_SEL field selected by dest_ch in
 * the FSU flow registers, then write all four registers back.
 * NOTE(review): switch case labels and locking are missing from this
 * extracted fragment; comments describe only the visible statements.
 */
1769 int32_t ipu_unlink_channels(struct ipu_soc *ipu, ipu_channel_t src_ch, ipu_channel_t dest_ch)
1772 uint32_t fs_proc_flow1;
1773 uint32_t fs_proc_flow2;
1774 uint32_t fs_proc_flow3;
1775 uint32_t fs_disp_flow1;
/* Snapshot the flow registers before masking out the link. */
1779 fs_proc_flow1 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1780 fs_proc_flow2 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW2);
1781 fs_proc_flow3 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW3);
1782 fs_disp_flow1 = ipu_cm_read(ipu, IPU_FS_DISP_FLOW1);
/* Per-src_ch: clear the destination-select field. */
1786 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1789 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1792 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1795 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1797 case CSI_PRP_ENC_MEM:
1798 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1800 case CSI_PRP_VF_MEM:
1801 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1804 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1806 case MEM_ROT_PP_MEM:
1807 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1809 case MEM_PRP_ENC_MEM:
1810 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1812 case MEM_ROT_ENC_MEM:
1813 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1815 case MEM_PRP_VF_MEM:
1816 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1818 case MEM_VDI_PRP_VF_MEM:
1819 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1821 case MEM_ROT_VF_MEM:
1822 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
/* Per-dest_ch: clear the source-select field. */
1831 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1833 case MEM_ROT_PP_MEM:
1834 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1836 case MEM_PRP_ENC_MEM:
1837 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1839 case MEM_ROT_ENC_MEM:
1840 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1842 case MEM_PRP_VF_MEM:
1843 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1845 case MEM_VDI_PRP_VF_MEM:
1846 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1848 case MEM_ROT_VF_MEM:
1849 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1852 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1855 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1858 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1861 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1864 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1867 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
/* Commit all four flow registers. */
1874 ipu_cm_write(ipu, fs_proc_flow1, IPU_FS_PROC_FLOW1);
1875 ipu_cm_write(ipu, fs_proc_flow2, IPU_FS_PROC_FLOW2);
1876 ipu_cm_write(ipu, fs_proc_flow3, IPU_FS_PROC_FLOW3);
1877 ipu_cm_write(ipu, fs_disp_flow1, IPU_FS_DISP_FLOW1);
1883 EXPORT_SYMBOL(ipu_unlink_channels);
1886 * This function checks whether a logical channel is enabled.
1888 * @param ipu ipu handler
1889 * @param channel Input parameter for the logical channel ID.
1891 * @return This function returns 1 if the requested channel is enabled or
1892 * 0 if it is not enabled.
/*
 * ipu_is_channel_busy() - report whether either DMA channel (video input
 * or output) of a logical channel is currently enabled in the IDMAC
 * channel-enable registers.
 * NOTE(review): braces and return statements are missing from this
 * fragment; the visible code only reads IDMAC_CHA_EN and tests the
 * per-channel mask bits.
 */
1894 int32_t ipu_is_channel_busy(struct ipu_soc *ipu, ipu_channel_t channel)
1900 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
1901 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
1903 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
1904 if (reg & idma_mask(in_dma))
1906 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
1907 if (reg & idma_mask(out_dma))
1911 EXPORT_SYMBOL(ipu_is_channel_busy);
1914 * This function enables a logical channel.
1916 * @param ipu ipu handler
1917 * @param channel Input parameter for the logical channel ID.
1919 * @return This function returns 0 on success or negative error code on
/*
 * ipu_enable_channel() - enable a logical channel: turn on the IPU
 * submodules that have a non-zero use count via IPU_CONF, enable the
 * channel's IDMAC DMA channels (input, output, plus graphic/alpha side
 * channels when configured), enable DP/DC paths and the IC task where
 * applicable, and record the channel in channel_enable_mask.
 * NOTE(review): locking, returns and some braces are missing from this
 * extracted fragment; comments describe only the visible statements.
 */
1922 int32_t ipu_enable_channel(struct ipu_soc *ipu, ipu_channel_t channel)
/* Refuse double-enable: the bit in channel_enable_mask is already set. */
1933 if (ipu->channel_enable_mask & (1L << IPU_CHAN_ID(channel))) {
1934 dev_err(ipu->dev, "Warning: channel already enabled %d\n",
1935 IPU_CHAN_ID(channel));
1940 /* Get input and output dma channels */
1941 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
1942 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Rebuild IPU_CONF enable bits from the per-submodule use counters. */
1944 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
1945 if (ipu->di_use_count[0] > 0) {
1946 ipu_conf |= IPU_CONF_DI0_EN;
1948 if (ipu->di_use_count[1] > 0) {
1949 ipu_conf |= IPU_CONF_DI1_EN;
1951 if (ipu->dp_use_count > 0)
1952 ipu_conf |= IPU_CONF_DP_EN;
1953 if (ipu->dc_use_count > 0)
1954 ipu_conf |= IPU_CONF_DC_EN;
1955 if (ipu->dmfc_use_count > 0)
1956 ipu_conf |= IPU_CONF_DMFC_EN;
1957 if (ipu->ic_use_count > 0)
1958 ipu_conf |= IPU_CONF_IC_EN;
1959 if (ipu->vdi_use_count > 0) {
1960 ipu_conf |= IPU_CONF_ISP_EN;
1961 ipu_conf |= IPU_CONF_VDI_EN;
1962 ipu_conf |= IPU_CONF_IC_INPUT;
1964 if (ipu->rot_use_count > 0)
1965 ipu_conf |= IPU_CONF_ROT_EN;
1966 if (ipu->smfc_use_count > 0)
1967 ipu_conf |= IPU_CONF_SMFC_EN;
1968 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
/* Enable the IDMAC DMA channels that are valid for this logical channel. */
1970 if (idma_is_valid(in_dma)) {
1971 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
1972 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
1974 if (idma_is_valid(out_dma)) {
1975 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
1976 ipu_idmac_write(ipu, reg | idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
/* Secondary (graphic) input channel for PP / PRP VF paths. */
1979 if ((ipu->sec_chan_en[IPU_CHAN_ID(channel)]) &&
1980 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM) ||
1981 (channel == MEM_VDI_PRP_VF_MEM))) {
1982 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
1983 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(sec_dma));
1984 ipu_idmac_write(ipu, reg | idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
/* Third (separate alpha) channel: enable its DMA and flag the matching
 * graphic/video channel in IDMAC_SEP_ALPHA. */
1986 if ((ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) &&
1987 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM))) {
1988 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
1989 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
1990 ipu_idmac_write(ipu, reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
1992 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
1993 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
1994 ipu_idmac_write(ipu, reg | idma_mask(sec_dma), IDMAC_SEP_ALPHA);
1995 } else if ((ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) &&
1996 ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC))) {
1997 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
1998 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
1999 ipu_idmac_write(ipu, reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
2000 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2001 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_SEP_ALPHA);
/* Display channels: enable the watermark and the DP/DC data path. */
2004 if ((channel == MEM_DC_SYNC) || (channel == MEM_BG_SYNC) ||
2005 (channel == MEM_FG_SYNC)) {
2006 reg = ipu_idmac_read(ipu, IDMAC_WM_EN(in_dma));
2007 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_WM_EN(in_dma));
2009 _ipu_dp_dc_enable(ipu, channel);
2012 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
2013 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma))
2014 _ipu_ic_enable_task(ipu, channel);
2016 ipu->channel_enable_mask |= 1L << IPU_CHAN_ID(channel);
2022 EXPORT_SYMBOL(ipu_enable_channel);
2025 * This function checks whether a buffer is ready for a logical channel.
2027 * @param ipu ipu handler
2028 * @param channel Input parameter for the logical channel ID.
2030 * @param type Input parameter which buffer to clear.
2032 * @param bufNum Input parameter for which buffer number clear
/*
 * ipu_check_buffer_ready() - test the IPU_CHA_BUFn_RDY bit for buffer
 * bufNum (0, 1 or 2) of a logical channel.
 * NOTE(review): braces and return statements are missing from this
 * fragment; the visible code reads the matching ready register and
 * tests the per-channel mask bit.
 */
2036 int32_t ipu_check_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2039 uint32_t dma_chan = channel_2_dma(channel, type);
2042 if (dma_chan == IDMA_CHAN_INVALID)
2046 reg = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan));
2047 else if (bufNum == 1)
2048 reg = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan));
2050 reg = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan));
2052 if (reg & idma_mask(dma_chan))
2057 EXPORT_SYMBOL(ipu_check_buffer_ready);
2060 * This function clears the buffer-ready state for a logical channel.
2062 * @param ipu ipu handler
2063 * @param channel Input parameter for the logical channel ID.
2065 * @param type Input parameter which buffer to clear.
2067 * @param bufNum Input parameter for which buffer number clear
/*
 * _ipu_clear_buffer_ready() - clear the IPU_CHA_BUFn_RDY bit for buffer
 * bufNum of a logical channel.  The IPU_GPR write before the clear
 * switches the ready registers into write-one-to-clear mode; the final
 * IPU_GPR write restores write-one-to-set mode.
 * NOTE(review): fragment is missing lines; comments describe only the
 * visible code.
 */
2071 void _ipu_clear_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2074 uint32_t dma_ch = channel_2_dma(channel, type);
2076 if (!idma_is_valid(dma_ch))
2079 ipu_cm_write(ipu, 0xF0300000, IPU_GPR); /* write one to clear */
2081 ipu_cm_write(ipu, idma_mask(dma_ch),
2082 IPU_CHA_BUF0_RDY(dma_ch));
2083 else if (bufNum == 1)
2084 ipu_cm_write(ipu, idma_mask(dma_ch),
2085 IPU_CHA_BUF1_RDY(dma_ch));
2087 ipu_cm_write(ipu, idma_mask(dma_ch),
2088 IPU_CHA_BUF2_RDY(dma_ch));
2089 ipu_cm_write(ipu, 0x0, IPU_GPR); /* write one to set */
/*
 * ipu_clear_buffer_ready() - public wrapper around
 * _ipu_clear_buffer_ready(); the underscore variant does the actual
 * register writes (locking, if any, is not visible in this fragment).
 */
2092 void ipu_clear_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2096 _ipu_clear_buffer_ready(ipu, channel, type, bufNum);
2099 EXPORT_SYMBOL(ipu_clear_buffer_ready);
2102 * This function disables a logical channel.
2104 * @param ipu ipu handler
2105 * @param channel Input parameter for the logical channel ID.
2107 * @param wait_for_stop Flag to set whether to wait for channel end
2108 * of frame or return immediately.
2110 * @return This function returns 0 on success or negative error code on
/*
 * ipu_disable_channel() - disable a logical channel: optionally wait for
 * the channel's DMA activity to drain (via EOF interrupts / busy polls),
 * tear down DP/DC and the IC task, disable every associated IDMAC DMA
 * channel, reset current-buffer pointers, and clear all buffer-ready
 * bits.  For MEM_FG_SYNC, the window is temporarily moved to (0,0) and
 * restored afterwards.
 * NOTE(review): locking, loop bodies, timeouts and returns are partially
 * missing from this extracted fragment; comments describe only the
 * visible statements.
 */
2113 int32_t ipu_disable_channel(struct ipu_soc *ipu, ipu_channel_t channel, bool wait_for_stop)
2118 uint32_t sec_dma = NO_DMA;
2119 uint32_t thrd_dma = NO_DMA;
2120 uint16_t fg_pos_x, fg_pos_y;
/* Refuse to disable a channel that is not marked enabled. */
2124 if ((ipu->channel_enable_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
2125 dev_err(ipu->dev, "Channel already disabled %d\n",
2126 IPU_CHAN_ID(channel));
2131 /* Get input and output dma channels */
2132 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
2133 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Nothing to do if neither DMA channel is enabled in the IDMAC. */
2135 if ((idma_is_valid(in_dma) &&
2136 !idma_is_set(ipu, IDMAC_CHA_EN, in_dma))
2137 && (idma_is_valid(out_dma) &&
2138 !idma_is_set(ipu, IDMAC_CHA_EN, out_dma))) {
/* Resolve the optional graphic (sec) and alpha (thrd) side channels. */
2143 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)])
2144 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2145 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) {
2146 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2147 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
/* Display channels: park the FG window and shut down DP/DC first. */
2150 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2151 (channel == MEM_DC_SYNC)) {
2152 if (channel == MEM_FG_SYNC) {
2153 _ipu_disp_get_window_pos(ipu, channel, &fg_pos_x, &fg_pos_y);
2154 _ipu_disp_set_window_pos(ipu, channel, 0, 0);
2157 _ipu_dp_dc_disable(ipu, channel, false);
2160 * wait for BG channel EOF then disable FG-IDMAC,
2161 * it avoid FG NFB4EOF error.
2163 if ((channel == MEM_FG_SYNC) && (ipu_is_channel_busy(ipu, MEM_BG_SYNC))) {
2166 ipu_cm_write(ipu, IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF),
2167 IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF));
2168 while ((ipu_cm_read(ipu, IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF)) &
2169 IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF)) == 0) {
2173 dev_err(ipu->dev, "warning: wait for bg sync eof timeout\n");
/* Non-display channels: poll the busy bits until every associated DMA
 * channel has drained, using per-channel EOF status as the wait event. */
2178 } else if (wait_for_stop) {
2179 while (idma_is_set(ipu, IDMAC_CHA_BUSY, in_dma) ||
2180 idma_is_set(ipu, IDMAC_CHA_BUSY, out_dma) ||
2181 (ipu->sec_chan_en[IPU_CHAN_ID(channel)] &&
2182 idma_is_set(ipu, IDMAC_CHA_BUSY, sec_dma)) ||
2183 (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] &&
2184 idma_is_set(ipu, IDMAC_CHA_BUSY, thrd_dma))) {
2185 uint32_t irq = 0xffffffff;
2188 if (idma_is_set(ipu, IDMAC_CHA_BUSY, out_dma))
2190 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] &&
2191 idma_is_set(ipu, IDMAC_CHA_BUSY, sec_dma))
2193 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] &&
2194 idma_is_set(ipu, IDMAC_CHA_BUSY, thrd_dma))
2196 if (idma_is_set(ipu, IDMAC_CHA_BUSY, in_dma))
2199 if (irq == 0xffffffff) {
2200 dev_dbg(ipu->dev, "warning: no channel busy, break\n");
2204 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq),
2205 IPUIRQ_2_STATREG(irq));
2207 dev_dbg(ipu->dev, "warning: channel %d busy, need wait\n", irq);
2209 while (((ipu_cm_read(ipu, IPUIRQ_2_STATREG(irq))
2210 & IPUIRQ_2_MASK(irq)) == 0) &&
2211 (idma_is_set(ipu, IDMAC_CHA_BUSY, irq))) {
2215 ipu_dump_registers(ipu);
2216 dev_err(ipu->dev, "warning: disable ipu dma channel %d during its busy state\n", irq);
/* Display channels: drop the watermark enable bit. */
2224 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2225 (channel == MEM_DC_SYNC)) {
2226 reg = ipu_idmac_read(ipu, IDMAC_WM_EN(in_dma));
2227 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_WM_EN(in_dma));
2230 /* Disable IC task */
2231 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
2232 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma))
2233 _ipu_ic_disable_task(ipu, channel);
2235 /* Disable DMA channel(s) */
2236 if (idma_is_valid(in_dma)) {
2237 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
2238 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
2239 ipu_cm_write(ipu, idma_mask(in_dma), IPU_CHA_CUR_BUF(in_dma));
2240 ipu_cm_write(ipu, tri_cur_buf_mask(in_dma),
2241 IPU_CHA_TRIPLE_CUR_BUF(in_dma));
2243 if (idma_is_valid(out_dma)) {
2244 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
2245 ipu_idmac_write(ipu, reg & ~idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
2246 ipu_cm_write(ipu, idma_mask(out_dma), IPU_CHA_CUR_BUF(out_dma));
2247 ipu_cm_write(ipu, tri_cur_buf_mask(out_dma),
2248 IPU_CHA_TRIPLE_CUR_BUF(out_dma));
2250 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2251 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(sec_dma));
2252 ipu_idmac_write(ipu, reg & ~idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
2253 ipu_cm_write(ipu, idma_mask(sec_dma), IPU_CHA_CUR_BUF(sec_dma));
2255 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2256 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
2257 ipu_idmac_write(ipu, reg & ~idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
2258 if (channel == MEM_BG_SYNC || channel == MEM_FG_SYNC) {
2259 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2260 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_SEP_ALPHA);
2262 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2263 ipu_idmac_write(ipu, reg & ~idma_mask(sec_dma), IDMAC_SEP_ALPHA);
2265 ipu_cm_write(ipu, idma_mask(thrd_dma), IPU_CHA_CUR_BUF(thrd_dma));
/* Restore the FG window position saved before the disable. */
2268 if (channel == MEM_FG_SYNC)
2269 _ipu_disp_set_window_pos(ipu, channel, fg_pos_x, fg_pos_y);
2271 /* Set channel buffers NOT to be ready */
2272 if (idma_is_valid(in_dma)) {
2273 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 0);
2274 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 1);
2275 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 2);
2277 if (idma_is_valid(out_dma)) {
2278 _ipu_clear_buffer_ready(ipu, channel, IPU_OUTPUT_BUFFER, 0);
2279 _ipu_clear_buffer_ready(ipu, channel, IPU_OUTPUT_BUFFER, 1);
2281 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2282 _ipu_clear_buffer_ready(ipu, channel, IPU_GRAPH_IN_BUFFER, 0);
2283 _ipu_clear_buffer_ready(ipu, channel, IPU_GRAPH_IN_BUFFER, 1);
2285 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2286 _ipu_clear_buffer_ready(ipu, channel, IPU_ALPHA_IN_BUFFER, 0);
2287 _ipu_clear_buffer_ready(ipu, channel, IPU_ALPHA_IN_BUFFER, 1);
2290 ipu->channel_enable_mask &= ~(1L << IPU_CHAN_ID(channel));
2296 EXPORT_SYMBOL(ipu_disable_channel);
2299 * This function enables CSI.
2301 * @param ipu ipu handler
2302 * @param csi csi num 0 or 1
2304 * @return This function returns 0 on success or negative error code on
/*
 * ipu_enable_csi() - reference-counted enable of CSI0/CSI1.  The
 * IPU_CONF enable bit is only set on the 0 -> 1 use-count transition.
 * NOTE(review): the csi range check, locking and return are missing
 * from this fragment; comments describe only the visible code.
 */
2307 int32_t ipu_enable_csi(struct ipu_soc *ipu, uint32_t csi)
2312 dev_err(ipu->dev, "Wrong csi num_%d\n", csi);
2317 ipu->csi_use_count[csi]++;
2319 if (ipu->csi_use_count[csi] == 1) {
2320 reg = ipu_cm_read(ipu, IPU_CONF);
2322 ipu_cm_write(ipu, reg | IPU_CONF_CSI0_EN, IPU_CONF);
2324 ipu_cm_write(ipu, reg | IPU_CONF_CSI1_EN, IPU_CONF);
2329 EXPORT_SYMBOL(ipu_enable_csi);
2332 * This function disables CSI.
2334 * @param ipu ipu handler
2335 * @param csi csi num 0 or 1
2337 * @return This function returns 0 on success or negative error code on
/*
 * ipu_disable_csi() - reference-counted disable of CSI0/CSI1, the
 * counterpart to ipu_enable_csi().  The IPU_CONF enable bit is only
 * cleared when the use count drops to zero.
 * NOTE(review): the csi range check, locking and return are missing
 * from this fragment; comments describe only the visible code.
 */
2340 int32_t ipu_disable_csi(struct ipu_soc *ipu, uint32_t csi)
2345 dev_err(ipu->dev, "Wrong csi num_%d\n", csi);
2350 ipu->csi_use_count[csi]--;
2351 if (ipu->csi_use_count[csi] == 0) {
2352 reg = ipu_cm_read(ipu, IPU_CONF);
2354 ipu_cm_write(ipu, reg & ~IPU_CONF_CSI0_EN, IPU_CONF);
2356 ipu_cm_write(ipu, reg & ~IPU_CONF_CSI1_EN, IPU_CONF);
2361 EXPORT_SYMBOL(ipu_disable_csi);
/*
 * ipu_irq_handler() - top-level IPU interrupt handler.  First pass walks
 * the error status registers (err_reg, 0-terminated list): pending error
 * bits are acknowledged, logged, and then masked in IPU_INT_CTRL so each
 * error is reported only once.  Second pass walks the normal status
 * registers (int_reg): each pending, enabled line is acknowledged and
 * dispatched to the handler registered in ipu->irq_list[].
 * NOTE(review): loop headers and the final return are missing from this
 * extracted fragment; comments describe only the visible statements.
 */
2363 static irqreturn_t ipu_irq_handler(int irq, void *desc)
2365 struct ipu_soc *ipu = desc;
2368 irqreturn_t result = IRQ_NONE;
/* 0-terminated lists of IPU_INT_STAT register indices to scan. */
2370 const int err_reg[] = { 5, 6, 9, 10, 0 };
2371 const int int_reg[] = { 1, 2, 3, 4, 11, 12, 13, 14, 15, 0 };
2372 unsigned long lock_flags;
2375 if (err_reg[i] == 0)
2378 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
/* Only consider error bits that are both pending and enabled. */
2380 int_stat = ipu_cm_read(ipu, IPU_INT_STAT(err_reg[i]));
2381 int_stat &= ipu_cm_read(ipu, IPU_INT_CTRL(err_reg[i]));
2383 ipu_cm_write(ipu, int_stat, IPU_INT_STAT(err_reg[i]));
2385 "IPU Error - IPU_INT_STAT_%d = 0x%08X\n",
2386 err_reg[i], int_stat);
2387 /* Disable interrupts so we only get error once */
2389 ipu_cm_read(ipu, IPU_INT_CTRL(err_reg[i])) & ~int_stat;
2390 ipu_cm_write(ipu, int_stat, IPU_INT_CTRL(err_reg[i]));
2393 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2397 if (int_reg[i] == 0)
2399 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
2400 int_stat = ipu_cm_read(ipu, IPU_INT_STAT(int_reg[i]));
2401 int_stat &= ipu_cm_read(ipu, IPU_INT_CTRL(int_reg[i]));
2402 ipu_cm_write(ipu, int_stat, IPU_INT_STAT(int_reg[i]));
2403 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
/* Dispatch each pending line to its registered handler; 'line' is
 * translated from a bit position to a global IPU irq number. */
2404 while ((line = ffs(int_stat)) != 0) {
2406 int_stat &= ~(1UL << line);
2407 line += (int_reg[i] - 1) * 32;
2409 ipu->irq_list[line].handler(line,
2410 ipu->irq_list[line].
2419 * This function enables the interrupt for the specified interrupt line.
2420 * The interrupt lines are defined in \b ipu_irq_line enum.
2422 * @param ipu ipu handler
2423 * @param irq Interrupt line to enable interrupt for.
/*
 * ipu_enable_irq() - unmask one IPU interrupt line by setting its bit in
 * the corresponding IPU_INT_CTRL register, under ipu->spin_lock.
 */
2426 void ipu_enable_irq(struct ipu_soc *ipu, uint32_t irq)
2429 unsigned long lock_flags;
2433 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
2435 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2436 reg |= IPUIRQ_2_MASK(irq);
2437 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2439 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2443 EXPORT_SYMBOL(ipu_enable_irq);
2446 * This function disables the interrupt for the specified interrupt line.
2447 * The interrupt lines are defined in \b ipu_irq_line enum.
2449 * @param ipu ipu handler
2450 * @param irq Interrupt line to disable interrupt for.
/*
 * ipu_disable_irq() - mask one IPU interrupt line by clearing its bit in
 * the corresponding IPU_INT_CTRL register, under ipu->spin_lock.
 */
2453 void ipu_disable_irq(struct ipu_soc *ipu, uint32_t irq)
2456 unsigned long lock_flags;
2460 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
2462 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2463 reg &= ~IPUIRQ_2_MASK(irq);
2464 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2466 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2470 EXPORT_SYMBOL(ipu_disable_irq);
2473 * This function clears the interrupt for the specified interrupt line.
2474 * The interrupt lines are defined in \b ipu_irq_line enum.
2476 * @param ipu ipu handler
2477 * @param irq Interrupt line to clear interrupt for.
/*
 * ipu_clear_irq() - acknowledge one IPU interrupt line by writing its
 * mask bit to the status register (write-one-to-clear), under
 * ipu->spin_lock.
 */
2480 void ipu_clear_irq(struct ipu_soc *ipu, uint32_t irq)
2482 unsigned long lock_flags;
2486 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
2488 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq), IPUIRQ_2_STATREG(irq));
2490 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2494 EXPORT_SYMBOL(ipu_clear_irq);
2497 * This function returns the current interrupt status for the specified
2498 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2500 * @param ipu ipu handle
2501 * @param irq Interrupt line to get status for.
2503 * @return Returns true if the interrupt is pending/asserted or false if
2504 * the interrupt is not pending.
/*
 * Test the raw status bit for line @irq.  A single register read needs
 * no serialization, so the spinlock is intentionally not taken here.
 */
2506 bool ipu_get_irq_status(struct ipu_soc *ipu, uint32_t irq)
2512 reg = ipu_cm_read(ipu, IPUIRQ_2_STATREG(irq));
2516 if (reg & IPUIRQ_2_MASK(irq))
2521 EXPORT_SYMBOL(ipu_get_irq_status);
2524 * This function registers an interrupt handler function for the specified
2525 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2527 * @param ipu ipu handle
2528 * @param irq Interrupt line to install the handler on.
2530 * @param handler Input parameter for address of the handler
2533 * @param irq_flags Flags for interrupt mode. Currently not used.
2535 * @param devname Input parameter for string name of driver
2536 * registering the handler.
2538 * @param dev_id Input parameter for pointer of data to be
2539 * passed to the handler.
2541 * @return This function returns 0 on success or negative error code on
/*
 * Install @handler on IPU interrupt line @irq.  Fails if a handler is
 * already registered for that line.  Any stale status bit is cleared
 * before the line is unmasked so a leftover event from a previous user
 * cannot fire spuriously into the new handler.
 */
2544 int ipu_request_irq(struct ipu_soc *ipu, uint32_t irq,
2545 irqreturn_t(*handler) (int, void *),
2546 uint32_t irq_flags, const char *devname, void *dev_id)
2548 unsigned long lock_flags;
/* Out-of-range line numbers are a driver bug, not a runtime error. */
2550 BUG_ON(irq >= IPU_IRQ_COUNT);
2554 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
/* Refuse to silently replace an already-installed handler. */
2556 if (ipu->irq_list[irq].handler != NULL) {
2558 "handler already installed on irq %d\n", irq);
2559 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2563 ipu->irq_list[irq].handler = handler;
2564 ipu->irq_list[irq].flags = irq_flags;
2565 ipu->irq_list[irq].dev_id = dev_id;
2566 ipu->irq_list[irq].name = devname;
2568 /* clear irq stat for previous use */
2569 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq), IPUIRQ_2_STATREG(irq));
2571 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
/* Unmask only after bookkeeping is complete and the lock is dropped. */
2575 ipu_enable_irq(ipu, irq); /* enable the interrupt */
2579 EXPORT_SYMBOL(ipu_request_irq);
2582 * This function unregisters an interrupt handler for the specified interrupt
2583 * line. The interrupt lines are defined in \b ipu_irq_line enum.
2585 * @param ipu ipu handle
2586 * @param irq Interrupt line to free the handler from.
2588 * @param dev_id Input parameter for pointer of data to be passed
2589 * to the handler. This must match value passed to
2590 * ipu_request_irq().
/*
 * Remove the handler on line @irq, but only if @dev_id matches the
 * cookie passed to ipu_request_irq() -- protects against one client
 * freeing another client's line.  The line is masked first so the
 * handler cannot run while it is being torn down.
 */
2593 void ipu_free_irq(struct ipu_soc *ipu, uint32_t irq, void *dev_id)
2595 unsigned long lock_flags;
2597 ipu_disable_irq(ipu, irq); /* disable the interrupt */
2599 spin_lock_irqsave(&ipu->spin_lock, lock_flags);
2600 if (ipu->irq_list[irq].dev_id == dev_id)
2601 ipu->irq_list[irq].handler = NULL;
2602 spin_unlock_irqrestore(&ipu->spin_lock, lock_flags);
2604 EXPORT_SYMBOL(ipu_free_irq);
/*
 * Return the index of the buffer the IDMAC is currently working on for
 * @channel/@type.  Channels running in triple-buffer mode (TRB_MODE_SEL
 * bit set and channel supports TRB) read the multi-bit field in
 * IPU_CHA_TRIPLE_CUR_BUF; otherwise the single double-buffer bit in
 * IPU_CHA_CUR_BUF is used.
 */
2606 uint32_t ipu_get_cur_buffer_idx(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type)
2608 uint32_t reg, dma_chan;
2610 dma_chan = channel_2_dma(channel, type);
2611 if (!idma_is_valid(dma_chan))
2614 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
2615 if ((reg & idma_mask(dma_chan)) && _ipu_is_trb_chan(dma_chan)) {
2616 reg = ipu_cm_read(ipu, IPU_CHA_TRIPLE_CUR_BUF(dma_chan));
2617 return (reg & tri_cur_buf_mask(dma_chan)) >>
2618 tri_cur_buf_shift(dma_chan);
2620 reg = ipu_cm_read(ipu, IPU_CHA_CUR_BUF(dma_chan));
2621 if (reg & idma_mask(dma_chan))
2627 EXPORT_SYMBOL(ipu_get_cur_buffer_idx);
/*
 * Decode the processing-task status field for @channel from the single
 * IPU_PROC_TASK_STAT register.  Each task (VF, VF-rotate, encoder,
 * encoder-rotate, post-processor, PP-rotate) occupies its own bit
 * field; channels that map to no task report TASK_STAT_IDLE.
 */
2629 uint32_t _ipu_channel_status(struct ipu_soc *ipu, ipu_channel_t channel)
2632 uint32_t task_stat_reg = ipu_cm_read(ipu, IPU_PROC_TASK_STAT);
2635 case MEM_PRP_VF_MEM:
2636 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
/* VDI deinterlacing shares the viewfinder task status field. */
2638 case MEM_VDI_PRP_VF_MEM:
2639 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
2641 case MEM_ROT_VF_MEM:
2643 (task_stat_reg & TSTAT_VF_ROT_MASK) >> TSTAT_VF_ROT_OFFSET;
2645 case MEM_PRP_ENC_MEM:
2646 stat = (task_stat_reg & TSTAT_ENC_MASK) >> TSTAT_ENC_OFFSET;
2648 case MEM_ROT_ENC_MEM:
2650 (task_stat_reg & TSTAT_ENC_ROT_MASK) >>
2651 TSTAT_ENC_ROT_OFFSET;
2654 stat = (task_stat_reg & TSTAT_PP_MASK) >> TSTAT_PP_OFFSET;
2656 case MEM_ROT_PP_MEM:
2658 (task_stat_reg & TSTAT_PP_ROT_MASK) >> TSTAT_PP_ROT_OFFSET;
2662 stat = TASK_STAT_IDLE;
/*
 * Hand the display path over from @from_ch to @to_ch: enable the target
 * IDMAC channel, stop DP/DC output for the source, disable the source
 * channel, reset its current-buffer pointers, and clear any pending
 * buffer-ready flags so the source channel restarts cleanly if reused.
 * Only the IPU_INPUT_BUFFER DMA channels are involved.
 */
2668 int32_t ipu_swap_channel(struct ipu_soc *ipu, ipu_channel_t from_ch, ipu_channel_t to_ch)
2672 int from_dma = channel_2_dma(from_ch, IPU_INPUT_BUFFER);
2673 int to_dma = channel_2_dma(to_ch, IPU_INPUT_BUFFER);
2677 /* enable target channel */
2678 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(to_dma));
2679 ipu_idmac_write(ipu, reg | idma_mask(to_dma), IDMAC_CHA_EN(to_dma));
/* Track software-side enable state alongside the hardware bit. */
2681 ipu->channel_enable_mask |= 1L << IPU_CHAN_ID(to_ch);
2684 _ipu_dp_dc_disable(ipu, from_ch, true);
2686 /* disable source channel */
2687 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(from_dma));
2688 ipu_idmac_write(ipu, reg & ~idma_mask(from_dma), IDMAC_CHA_EN(from_dma));
/* Writing the mask resets the channel's current-buffer pointer. */
2689 ipu_cm_write(ipu, idma_mask(from_dma), IPU_CHA_CUR_BUF(from_dma));
2690 ipu_cm_write(ipu, tri_cur_buf_mask(from_dma),
2691 IPU_CHA_TRIPLE_CUR_BUF(from_dma));
2693 ipu->channel_enable_mask &= ~(1L << IPU_CHAN_ID(from_ch));
/* Drop ready flags for all three possible buffers of the source. */
2695 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 0);
2696 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 1);
2697 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 2);
2703 EXPORT_SYMBOL(ipu_swap_channel);
/*
 * Return the number of bytes per pixel for IPU pixel format @fmt.
 * Formats are grouped by storage size: 8-bit (and planar YUV, counted
 * per Y sample), 16-bit packed, 24-bit packed, and 32-bit formats.
 */
2705 uint32_t bytes_per_pixel(uint32_t fmt)
2708 case IPU_PIX_FMT_GENERIC: /*generic data */
2709 case IPU_PIX_FMT_RGB332:
2710 case IPU_PIX_FMT_YUV420P:
2711 case IPU_PIX_FMT_YVU420P:
2712 case IPU_PIX_FMT_YUV422P:
2715 case IPU_PIX_FMT_RGB565:
2716 case IPU_PIX_FMT_YUYV:
2717 case IPU_PIX_FMT_UYVY:
2720 case IPU_PIX_FMT_BGR24:
2721 case IPU_PIX_FMT_RGB24:
2724 case IPU_PIX_FMT_GENERIC_32: /*generic data */
2725 case IPU_PIX_FMT_BGR32:
2726 case IPU_PIX_FMT_BGRA32:
2727 case IPU_PIX_FMT_RGB32:
2728 case IPU_PIX_FMT_RGBA32:
2729 case IPU_PIX_FMT_ABGR32:
2738 EXPORT_SYMBOL(bytes_per_pixel);
/*
 * Map IPU pixel format @fmt to its colorspace.  All RGB-family formats
 * (including the LVDS display variants) share the first case group;
 * remaining formats fall through to the default (non-RGB) path.
 */
2740 ipu_color_space_t format_to_colorspace(uint32_t fmt)
2743 case IPU_PIX_FMT_RGB666:
2744 case IPU_PIX_FMT_RGB565:
2745 case IPU_PIX_FMT_BGR24:
2746 case IPU_PIX_FMT_RGB24:
2747 case IPU_PIX_FMT_GBR24:
2748 case IPU_PIX_FMT_BGR32:
2749 case IPU_PIX_FMT_BGRA32:
2750 case IPU_PIX_FMT_RGB32:
2751 case IPU_PIX_FMT_RGBA32:
2752 case IPU_PIX_FMT_ABGR32:
2753 case IPU_PIX_FMT_LVDS666:
2754 case IPU_PIX_FMT_LVDS888:
/* True only for the 32-bit formats that carry an alpha channel. */
2765 bool ipu_pixel_format_has_alpha(uint32_t fmt)
2768 case IPU_PIX_FMT_RGBA32:
2769 case IPU_PIX_FMT_BGRA32:
2770 case IPU_PIX_FMT_ABGR32:
/*
 * Legacy platform-PM suspend: if the IPU is currently in use, quiesce
 * it and snapshot every register that ipu_resume() must restore --
 * channel enables, double/triple buffer mode selects, IDMAC sub-address
 * registers, sub-module configuration, and buffer-ready flags.
 * Channels that are still busy are given (up to a timeout) a chance to
 * finish their current transfer; already-idle channels are disabled as
 * soon as they are seen idle.
 */
2780 static int ipu_suspend(struct platform_device *pdev, pm_message_t state)
2782 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
2783 struct ipu_soc *ipu = platform_get_drvdata(pdev);
2785 if (atomic_read(&ipu->ipu_use_count)) {
2786 /* save and disable enabled channels*/
2787 ipu->idma_enable_reg[0] = ipu_idmac_read(ipu, IDMAC_CHA_EN(0));
2788 ipu->idma_enable_reg[1] = ipu_idmac_read(ipu, IDMAC_CHA_EN(32));
/* Spin (bounded by timeout) while any enabled channel is still busy. */
2789 while ((ipu_idmac_read(ipu, IDMAC_CHA_BUSY(0))
2790 & ipu->idma_enable_reg[0])
2791 || (ipu_idmac_read(ipu, IDMAC_CHA_BUSY(32))
2792 & ipu->idma_enable_reg[1])) {
2793 /* disable channel not busy already */
2794 uint32_t chan_should_disable, timeout = 1000, time = 0;
2796 chan_should_disable =
2797 ipu_idmac_read(ipu, IDMAC_CHA_BUSY(0))
2798 ^ ipu->idma_enable_reg[0];
2799 ipu_idmac_write(ipu, (~chan_should_disable) &
2800 ipu->idma_enable_reg[0], IDMAC_CHA_EN(0));
/*
 * Fix: the second channel bank (channels 32..63) lives at offset 32.
 * The original read IDMAC_CHA_BUSY(1), which addresses the first bank
 * again, so the wrong busy word was XORed against idma_enable_reg[1];
 * every other second-bank access in this function uses offset 32.
 */
2801 chan_should_disable =
2802 ipu_idmac_read(ipu, IDMAC_CHA_BUSY(32))
2803 ^ ipu->idma_enable_reg[1];
2804 ipu_idmac_write(ipu, (~chan_should_disable) &
2805 ipu->idma_enable_reg[1], IDMAC_CHA_EN(32));
2808 if (time >= timeout)
/* All channels settled (or timed out): force both enable banks off. */
2811 ipu_idmac_write(ipu, 0, IDMAC_CHA_EN(0));
2812 ipu_idmac_write(ipu, 0, IDMAC_CHA_EN(32));
2814 /* save double buffer select regs */
2815 ipu->cha_db_mode_reg[0] = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(0));
2816 ipu->cha_db_mode_reg[1] = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(32));
2817 ipu->cha_db_mode_reg[2] =
2818 ipu_cm_read(ipu, IPU_ALT_CHA_DB_MODE_SEL(0));
2819 ipu->cha_db_mode_reg[3] =
2820 ipu_cm_read(ipu, IPU_ALT_CHA_DB_MODE_SEL(32));
2822 /* save triple buffer select regs */
2823 ipu->cha_trb_mode_reg[0] = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(0));
2824 ipu->cha_trb_mode_reg[1] = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(32));
2826 /* save idamc sub addr regs */
2827 ipu->idma_sub_addr_reg[0] = ipu_idmac_read(ipu, IDMAC_SUB_ADDR_0);
2828 ipu->idma_sub_addr_reg[1] = ipu_idmac_read(ipu, IDMAC_SUB_ADDR_1);
2829 ipu->idma_sub_addr_reg[2] = ipu_idmac_read(ipu, IDMAC_SUB_ADDR_2);
2830 ipu->idma_sub_addr_reg[3] = ipu_idmac_read(ipu, IDMAC_SUB_ADDR_3);
2831 ipu->idma_sub_addr_reg[4] = ipu_idmac_read(ipu, IDMAC_SUB_ADDR_4);
2833 /* save sub-modules status and disable all */
2834 ipu->ic_conf_reg = ipu_ic_read(ipu, IC_CONF);
2835 ipu_ic_write(ipu, 0, IC_CONF);
2836 ipu->ipu_conf_reg = ipu_cm_read(ipu, IPU_CONF);
2837 ipu_cm_write(ipu, 0, IPU_CONF);
2839 /* save buf ready regs */
2840 ipu->buf_ready_reg[0] = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(0));
2841 ipu->buf_ready_reg[1] = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(32));
2842 ipu->buf_ready_reg[2] = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(0));
2843 ipu->buf_ready_reg[3] = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(32));
2844 ipu->buf_ready_reg[4] = ipu_cm_read(ipu, IPU_ALT_CHA_BUF0_RDY(0));
2845 ipu->buf_ready_reg[5] = ipu_cm_read(ipu, IPU_ALT_CHA_BUF0_RDY(32));
2846 ipu->buf_ready_reg[6] = ipu_cm_read(ipu, IPU_ALT_CHA_BUF1_RDY(0));
2847 ipu->buf_ready_reg[7] = ipu_cm_read(ipu, IPU_ALT_CHA_BUF1_RDY(32));
2848 ipu->buf_ready_reg[8] = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(0));
2849 ipu->buf_ready_reg[9] = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(32));
/*
 * Legacy platform-PM resume: if the IPU was in use at suspend time,
 * restore the state snapshotted by ipu_suspend() -- buffer-ready flags,
 * sub-module enables (IPU_CONF / IC_CONF), double/triple buffer mode
 * selects, IDMAC sub-address registers, and finally the IDMAC channel
 * enables.  DMFC setup and DC mappings are reprogrammed from scratch
 * rather than restored, and the sync refresh channels get their high
 * priority back.
 */
2858 static int ipu_resume(struct platform_device *pdev)
2860 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
2861 struct ipu_soc *ipu = platform_get_drvdata(pdev);
2866 if (atomic_read(&ipu->ipu_use_count)) {
2867 /* restore buf ready regs */
2868 ipu_cm_write(ipu, ipu->buf_ready_reg[0], IPU_CHA_BUF0_RDY(0));
2869 ipu_cm_write(ipu, ipu->buf_ready_reg[1], IPU_CHA_BUF0_RDY(32));
2870 ipu_cm_write(ipu, ipu->buf_ready_reg[2], IPU_CHA_BUF1_RDY(0));
2871 ipu_cm_write(ipu, ipu->buf_ready_reg[3], IPU_CHA_BUF1_RDY(32));
2872 ipu_cm_write(ipu, ipu->buf_ready_reg[4], IPU_ALT_CHA_BUF0_RDY(0));
2873 ipu_cm_write(ipu, ipu->buf_ready_reg[5], IPU_ALT_CHA_BUF0_RDY(32));
2874 ipu_cm_write(ipu, ipu->buf_ready_reg[6], IPU_ALT_CHA_BUF1_RDY(0));
2875 ipu_cm_write(ipu, ipu->buf_ready_reg[7], IPU_ALT_CHA_BUF1_RDY(32));
2876 ipu_cm_write(ipu, ipu->buf_ready_reg[8], IPU_CHA_BUF2_RDY(0));
2877 ipu_cm_write(ipu, ipu->buf_ready_reg[9], IPU_CHA_BUF2_RDY(32));
2879 /* re-enable sub-modules*/
2880 ipu_cm_write(ipu, ipu->ipu_conf_reg, IPU_CONF);
2881 ipu_ic_write(ipu, ipu->ic_conf_reg, IC_CONF);
2883 /* restore double buffer select regs */
2884 ipu_cm_write(ipu, ipu->cha_db_mode_reg[0], IPU_CHA_DB_MODE_SEL(0));
2885 ipu_cm_write(ipu, ipu->cha_db_mode_reg[1], IPU_CHA_DB_MODE_SEL(32));
2886 ipu_cm_write(ipu, ipu->cha_db_mode_reg[2],
2887 IPU_ALT_CHA_DB_MODE_SEL(0));
2888 ipu_cm_write(ipu, ipu->cha_db_mode_reg[3],
2889 IPU_ALT_CHA_DB_MODE_SEL(32));
2891 /* restore triple buffer select regs */
2892 ipu_cm_write(ipu, ipu->cha_trb_mode_reg[0], IPU_CHA_TRB_MODE_SEL(0));
2893 ipu_cm_write(ipu, ipu->cha_trb_mode_reg[1], IPU_CHA_TRB_MODE_SEL(32));
2895 /* restore idamc sub addr regs */
2896 ipu_idmac_write(ipu, ipu->idma_sub_addr_reg[0], IDMAC_SUB_ADDR_0);
2897 ipu_idmac_write(ipu, ipu->idma_sub_addr_reg[1], IDMAC_SUB_ADDR_1);
2898 ipu_idmac_write(ipu, ipu->idma_sub_addr_reg[2], IDMAC_SUB_ADDR_2);
2899 ipu_idmac_write(ipu, ipu->idma_sub_addr_reg[3], IDMAC_SUB_ADDR_3);
2900 ipu_idmac_write(ipu, ipu->idma_sub_addr_reg[4], IDMAC_SUB_ADDR_4);
2902 /* restart idma channel*/
2903 ipu_idmac_write(ipu, ipu->idma_enable_reg[0], IDMAC_CHA_EN(0));
2904 ipu_idmac_write(ipu, ipu->idma_enable_reg[1], IDMAC_CHA_EN(32));
2907 _ipu_dmfc_init(ipu, dmfc_type_setup, 1);
2908 _ipu_init_dc_mappings(ipu);
2909 /* Set sync refresh channels as high priority */
2910 ipu_idmac_write(ipu, 0x18800001L, IDMAC_CHA_PRI(0));
2918 * This structure contains pointers to the power management callback functions.
/*
 * Platform driver glue for "imx-ipuv3"; suspend/resume use the legacy
 * platform_driver PM callbacks rather than dev_pm_ops.
 */
2920 static struct platform_driver mxcipu_driver = {
2922 .name = "imx-ipuv3",
2925 .remove = ipu_remove,
2926 .suspend = ipu_suspend,
2927 .resume = ipu_resume,
/*
 * Module entry point: register the platform driver.  Run at
 * subsys_initcall time so the IPU is up before dependent drivers probe.
 */
2930 int32_t __init ipu_gen_init(void)
2934 ret = platform_driver_register(&mxcipu_driver);
2938 subsys_initcall(ipu_gen_init);
/* Module exit point: unregister the platform driver. */
2940 static void __exit ipu_gen_uninit(void)
2942 platform_driver_unregister(&mxcipu_driver);
2945 module_exit(ipu_gen_uninit);