2 * Copyright (C) 2010-2013 Freescale Semiconductor, Inc.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License as published by
6 * the Free Software Foundation; either version 2 of the License, or
7 * (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write to the Free Software
16 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 * Based on STMP378X PxP driver
21 * Copyright 2008-2009 Embedded Alley Solutions, Inc All Rights Reserved.
24 #include <linux/dma-mapping.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mutex.h>
31 #include <linux/platform_device.h>
32 #include <linux/slab.h>
33 #include <linux/vmalloc.h>
34 #include <linux/dmaengine.h>
35 #include <linux/pxp_dma.h>
36 #include <linux/timer.h>
37 #include <linux/clk.h>
38 #include <linux/workqueue.h>
39 #include <linux/sched.h>
42 #include "regs-pxp_v2.h"
/* Maximum down-scale ratio per pass: PS_SCALE fields are fixed point with
 * 0x1000 == 1.0 (see pxp_set_scaling()), so 0x4000 caps decimation at 4x. */
44 #define PXP_DOWNSCALE_THRESHOLD 0x4000
/* List of channels with pending work; guarded by pxp->lock (see
 * pxp_clk_disable()/pxp_irq()). */
46 static LIST_HEAD(head);
/* Idle period (ms) after which the clk_timer schedules clock gating. */
47 static int timeout_in_ms = 600;
/* dmaengine device exported to client drivers */
50 struct dma_device dma;
/* platform device that probed this PXP instance */
54 struct platform_device *pdev;
57 int irq; /* PXP IRQ to the CPU */
/* serializes clk_stat transitions in pxp_clk_enable()/pxp_clk_disable() */
60 struct mutex clk_mutex;
/* clk_stat value: PXP clock currently gated off */
62 #define CLK_STAT_OFF 0
/* dmaengine wrapper exposing the virtual PXP channels */
68 struct pxp_dma pxp_dma;
/* one virtual channel per concurrent client transaction */
69 struct pxp_channel channel[NR_PXP_VIRT_CHANNEL];
/* woken from pxp_irq() when an operation completes */
70 wait_queue_head_t done;
/* work item running clkoff_callback() to gate the clock */
71 struct work_struct work;
73 /* describes most recent processing configuration */
74 struct pxp_config_data pxp_conf_state;
76 /* to turn clock off when pxp is inactive */
77 struct timer_list clk_timer;
/* container_of helpers to walk from dmaengine objects back to driver state */
80 #define to_pxp_dma(d) container_of(d, struct pxp_dma, dma)
81 #define to_tx_desc(tx) container_of(tx, struct pxp_tx_desc, txd)
82 #define to_pxp_channel(d) container_of(d, struct pxp_channel, dma_chan)
83 #define to_pxp(id) container_of(id, struct pxps, pxp_dma)
85 #define PXP_DEF_BUFS 2
/* True while the PXP has not yet raised its completion IRQ (STAT.IRQ clear) */
88 #define PXP_WAITCON ((__raw_readl(pxp->base + HW_PXP_STAT) & \
89 BM_PXP_STAT_IRQ) != BM_PXP_STAT_IRQ)
91 static uint32_t pxp_s0_formats[] = {
100 * PXP common functions
102 static void dump_pxp_reg(struct pxps *pxp)
104 dev_dbg(pxp->dev, "PXP_CTRL 0x%x",
105 __raw_readl(pxp->base + HW_PXP_CTRL));
106 dev_dbg(pxp->dev, "PXP_STAT 0x%x",
107 __raw_readl(pxp->base + HW_PXP_STAT));
108 dev_dbg(pxp->dev, "PXP_OUT_CTRL 0x%x",
109 __raw_readl(pxp->base + HW_PXP_OUT_CTRL));
110 dev_dbg(pxp->dev, "PXP_OUT_BUF 0x%x",
111 __raw_readl(pxp->base + HW_PXP_OUT_BUF));
112 dev_dbg(pxp->dev, "PXP_OUT_BUF2 0x%x",
113 __raw_readl(pxp->base + HW_PXP_OUT_BUF2));
114 dev_dbg(pxp->dev, "PXP_OUT_PITCH 0x%x",
115 __raw_readl(pxp->base + HW_PXP_OUT_PITCH));
116 dev_dbg(pxp->dev, "PXP_OUT_LRC 0x%x",
117 __raw_readl(pxp->base + HW_PXP_OUT_LRC));
118 dev_dbg(pxp->dev, "PXP_OUT_PS_ULC 0x%x",
119 __raw_readl(pxp->base + HW_PXP_OUT_PS_ULC));
120 dev_dbg(pxp->dev, "PXP_OUT_PS_LRC 0x%x",
121 __raw_readl(pxp->base + HW_PXP_OUT_PS_LRC));
122 dev_dbg(pxp->dev, "PXP_OUT_AS_ULC 0x%x",
123 __raw_readl(pxp->base + HW_PXP_OUT_AS_ULC));
124 dev_dbg(pxp->dev, "PXP_OUT_AS_LRC 0x%x",
125 __raw_readl(pxp->base + HW_PXP_OUT_AS_LRC));
126 dev_dbg(pxp->dev, "PXP_PS_CTRL 0x%x",
127 __raw_readl(pxp->base + HW_PXP_PS_CTRL));
128 dev_dbg(pxp->dev, "PXP_PS_BUF 0x%x",
129 __raw_readl(pxp->base + HW_PXP_PS_BUF));
130 dev_dbg(pxp->dev, "PXP_PS_UBUF 0x%x",
131 __raw_readl(pxp->base + HW_PXP_PS_UBUF));
132 dev_dbg(pxp->dev, "PXP_PS_VBUF 0x%x",
133 __raw_readl(pxp->base + HW_PXP_PS_VBUF));
134 dev_dbg(pxp->dev, "PXP_PS_PITCH 0x%x",
135 __raw_readl(pxp->base + HW_PXP_PS_PITCH));
136 dev_dbg(pxp->dev, "PXP_PS_BACKGROUND 0x%x",
137 __raw_readl(pxp->base + HW_PXP_PS_BACKGROUND));
138 dev_dbg(pxp->dev, "PXP_PS_SCALE 0x%x",
139 __raw_readl(pxp->base + HW_PXP_PS_SCALE));
140 dev_dbg(pxp->dev, "PXP_PS_OFFSET 0x%x",
141 __raw_readl(pxp->base + HW_PXP_PS_OFFSET));
142 dev_dbg(pxp->dev, "PXP_PS_CLRKEYLOW 0x%x",
143 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYLOW));
144 dev_dbg(pxp->dev, "PXP_PS_CLRKEYHIGH 0x%x",
145 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYHIGH));
146 dev_dbg(pxp->dev, "PXP_AS_CTRL 0x%x",
147 __raw_readl(pxp->base + HW_PXP_AS_CTRL));
148 dev_dbg(pxp->dev, "PXP_AS_BUF 0x%x",
149 __raw_readl(pxp->base + HW_PXP_AS_BUF));
150 dev_dbg(pxp->dev, "PXP_AS_PITCH 0x%x",
151 __raw_readl(pxp->base + HW_PXP_AS_PITCH));
152 dev_dbg(pxp->dev, "PXP_AS_CLRKEYLOW 0x%x",
153 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYLOW));
154 dev_dbg(pxp->dev, "PXP_AS_CLRKEYHIGH 0x%x",
155 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYHIGH));
156 dev_dbg(pxp->dev, "PXP_CSC1_COEF0 0x%x",
157 __raw_readl(pxp->base + HW_PXP_CSC1_COEF0));
158 dev_dbg(pxp->dev, "PXP_CSC1_COEF1 0x%x",
159 __raw_readl(pxp->base + HW_PXP_CSC1_COEF1));
160 dev_dbg(pxp->dev, "PXP_CSC1_COEF2 0x%x",
161 __raw_readl(pxp->base + HW_PXP_CSC1_COEF2));
162 dev_dbg(pxp->dev, "PXP_CSC2_CTRL 0x%x",
163 __raw_readl(pxp->base + HW_PXP_CSC2_CTRL));
164 dev_dbg(pxp->dev, "PXP_CSC2_COEF0 0x%x",
165 __raw_readl(pxp->base + HW_PXP_CSC2_COEF0));
166 dev_dbg(pxp->dev, "PXP_CSC2_COEF1 0x%x",
167 __raw_readl(pxp->base + HW_PXP_CSC2_COEF1));
168 dev_dbg(pxp->dev, "PXP_CSC2_COEF2 0x%x",
169 __raw_readl(pxp->base + HW_PXP_CSC2_COEF2));
170 dev_dbg(pxp->dev, "PXP_CSC2_COEF3 0x%x",
171 __raw_readl(pxp->base + HW_PXP_CSC2_COEF3));
172 dev_dbg(pxp->dev, "PXP_CSC2_COEF4 0x%x",
173 __raw_readl(pxp->base + HW_PXP_CSC2_COEF4));
174 dev_dbg(pxp->dev, "PXP_CSC2_COEF5 0x%x",
175 __raw_readl(pxp->base + HW_PXP_CSC2_COEF5));
176 dev_dbg(pxp->dev, "PXP_LUT_CTRL 0x%x",
177 __raw_readl(pxp->base + HW_PXP_LUT_CTRL));
178 dev_dbg(pxp->dev, "PXP_LUT_ADDR 0x%x",
179 __raw_readl(pxp->base + HW_PXP_LUT_ADDR));
180 dev_dbg(pxp->dev, "PXP_LUT_DATA 0x%x",
181 __raw_readl(pxp->base + HW_PXP_LUT_DATA));
182 dev_dbg(pxp->dev, "PXP_LUT_EXTMEM 0x%x",
183 __raw_readl(pxp->base + HW_PXP_LUT_EXTMEM));
184 dev_dbg(pxp->dev, "PXP_CFA 0x%x",
185 __raw_readl(pxp->base + HW_PXP_CFA));
186 dev_dbg(pxp->dev, "PXP_HIST_CTRL 0x%x",
187 __raw_readl(pxp->base + HW_PXP_HIST_CTRL));
188 dev_dbg(pxp->dev, "PXP_HIST2_PARAM 0x%x",
189 __raw_readl(pxp->base + HW_PXP_HIST2_PARAM));
190 dev_dbg(pxp->dev, "PXP_HIST4_PARAM 0x%x",
191 __raw_readl(pxp->base + HW_PXP_HIST4_PARAM));
192 dev_dbg(pxp->dev, "PXP_HIST8_PARAM0 0x%x",
193 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM0));
194 dev_dbg(pxp->dev, "PXP_HIST8_PARAM1 0x%x",
195 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM1));
196 dev_dbg(pxp->dev, "PXP_HIST16_PARAM0 0x%x",
197 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM0));
198 dev_dbg(pxp->dev, "PXP_HIST16_PARAM1 0x%x",
199 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM1));
200 dev_dbg(pxp->dev, "PXP_HIST16_PARAM2 0x%x",
201 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM2));
202 dev_dbg(pxp->dev, "PXP_HIST16_PARAM3 0x%x",
203 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM3));
204 dev_dbg(pxp->dev, "PXP_POWER 0x%x",
205 __raw_readl(pxp->base + HW_PXP_POWER));
206 dev_dbg(pxp->dev, "PXP_NEXT 0x%x",
207 __raw_readl(pxp->base + HW_PXP_NEXT));
208 dev_dbg(pxp->dev, "PXP_DEBUGCTRL 0x%x",
209 __raw_readl(pxp->base + HW_PXP_DEBUGCTRL));
210 dev_dbg(pxp->dev, "PXP_DEBUG 0x%x",
211 __raw_readl(pxp->base + HW_PXP_DEBUG));
212 dev_dbg(pxp->dev, "PXP_VERSION 0x%x",
213 __raw_readl(pxp->base + HW_PXP_VERSION));
216 static bool is_yuv(u32 pix_fmt)
218 if ((pix_fmt == PXP_PIX_FMT_YUYV) |
219 (pix_fmt == PXP_PIX_FMT_UYVY) |
220 (pix_fmt == PXP_PIX_FMT_Y41P) |
221 (pix_fmt == PXP_PIX_FMT_YUV444) |
222 (pix_fmt == PXP_PIX_FMT_NV12) |
223 (pix_fmt == PXP_PIX_FMT_GREY) |
224 (pix_fmt == PXP_PIX_FMT_GY04) |
225 (pix_fmt == PXP_PIX_FMT_YVU410P) |
226 (pix_fmt == PXP_PIX_FMT_YUV410P) |
227 (pix_fmt == PXP_PIX_FMT_YVU420P) |
228 (pix_fmt == PXP_PIX_FMT_YUV420P) |
229 (pix_fmt == PXP_PIX_FMT_YUV420P2) |
230 (pix_fmt == PXP_PIX_FMT_YVU422P) |
231 (pix_fmt == PXP_PIX_FMT_YUV422P)) {
/*
 * pxp_set_ctrl() - program PS_CTRL (input format), OUT_CTRL (output format)
 * and the global CTRL register (scaling/flip/rotation) from the saved
 * configuration in pxp->pxp_conf_state.
 */
238 static void pxp_set_ctrl(struct pxps *pxp)
240 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
241 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
245 /* Configure S0 input format */
246 switch (pxp_conf->s0_param.pixel_fmt) {
247 case PXP_PIX_FMT_RGB24:
248 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB888;
250 case PXP_PIX_FMT_RGB565:
251 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB565;
253 case PXP_PIX_FMT_RGB555:
254 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB555;
256 case PXP_PIX_FMT_YUV420P:
257 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV420;
259 case PXP_PIX_FMT_GREY:
260 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y8;
262 case PXP_PIX_FMT_GY04:
263 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y4;
265 case PXP_PIX_FMT_YUV422P:
266 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV422;
268 case PXP_PIX_FMT_UYVY:
269 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__UYVY1P422;
275 ctrl = BF_PXP_PS_CTRL_FORMAT(fmt_ctrl);
276 __raw_writel(ctrl, pxp->base + HW_PXP_PS_CTRL);
278 /* Configure output format based on out_channel format */
279 switch (pxp_conf->out_param.pixel_fmt) {
280 case PXP_PIX_FMT_RGB24:
281 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB888;
283 case PXP_PIX_FMT_RGB565:
284 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB565;
286 case PXP_PIX_FMT_RGB555:
287 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB555;
289 case PXP_PIX_FMT_YUV420P:
290 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P420;
292 case PXP_PIX_FMT_YUV422P:
293 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P422;
295 case PXP_PIX_FMT_GREY:
296 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y8;
298 case PXP_PIX_FMT_GY04:
299 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y4;
305 ctrl = BF_PXP_OUT_CTRL_FORMAT(fmt_ctrl);
306 __raw_writel(ctrl, pxp->base + HW_PXP_OUT_CTRL);
/* Global control: scaling/flip flags come from proc_data; rotation is
 * programmed in units of 90 degrees. */
309 if (proc_data->scaling)
311 if (proc_data->vflip)
312 ctrl |= BM_PXP_CTRL_VFLIP;
313 if (proc_data->hflip)
314 ctrl |= BM_PXP_CTRL_HFLIP;
315 if (proc_data->rotate)
316 ctrl |= BF_PXP_CTRL_ROTATE(proc_data->rotate / 90);
318 /* H/W support: controls where rotation will occur in the PXP datapath,
319 * will export an interface if needed
321 /* ctrl |= BM_PXP_CTRL_ROT_POS; post rotation */
323 __raw_writel(ctrl, pxp->base + HW_PXP_CTRL);
/*
 * pxp_start() - unmask the completion IRQ and start the programmed
 * operation via the write-one-to-set CTRL_SET register.
 */
326 static int pxp_start(struct pxps *pxp)
328 __raw_writel(BM_PXP_CTRL_IRQ_ENABLE, pxp->base + HW_PXP_CTRL_SET);
329 __raw_writel(BM_PXP_CTRL_ENABLE, pxp->base + HW_PXP_CTRL_SET);
/*
 * pxp_set_outbuf() - program the output DMA buffer address, the output
 * lower-right corner (width/height - 1) and the output pitch.
 * Pitch is out_params->stride scaled by bytes-per-pixel: <<2 for RGB24
 * (stored as 32-bit pixels), <<1 for RGB565, else stride is used as-is.
 */
335 static void pxp_set_outbuf(struct pxps *pxp)
337 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
338 struct pxp_layer_param *out_params = &pxp_conf->out_param;
340 __raw_writel(out_params->paddr, pxp->base + HW_PXP_OUT_BUF);
342 __raw_writel(BF_PXP_OUT_LRC_X(out_params->width - 1) |
343 BF_PXP_OUT_LRC_Y(out_params->height - 1),
344 pxp->base + HW_PXP_OUT_LRC);
346 if (out_params->pixel_fmt == PXP_PIX_FMT_RGB24)
347 __raw_writel(out_params->stride << 2,
348 pxp->base + HW_PXP_OUT_PITCH);
349 else if (out_params->pixel_fmt == PXP_PIX_FMT_RGB565)
350 __raw_writel(out_params->stride << 1,
351 pxp->base + HW_PXP_OUT_PITCH);
353 __raw_writel(out_params->stride, pxp->base + HW_PXP_OUT_PITCH);
/*
 * pxp_set_s0colorkey() - program the S0 (PS) color key registers.
 * A color_key of -1 disables keying by setting LOW > HIGH so no pixel
 * can match; otherwise LOW == HIGH == color_key (single-value key).
 */
356 static void pxp_set_s0colorkey(struct pxps *pxp)
358 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
359 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
361 /* Low and high are set equal. V4L does not allow a chromakey range */
362 if (s0_params->color_key == -1) {
363 /* disable color key */
364 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_PS_CLRKEYLOW);
365 __raw_writel(0, pxp->base + HW_PXP_PS_CLRKEYHIGH);
367 __raw_writel(s0_params->color_key,
368 pxp->base + HW_PXP_PS_CLRKEYLOW);
369 __raw_writel(s0_params->color_key,
370 pxp->base + HW_PXP_PS_CLRKEYHIGH);
/*
 * pxp_set_olcolorkey() - program the overlay (AS) color key registers for
 * @layer_no. Keying is enabled only when color_key_enable is set and the
 * key is not -1; otherwise LOW > HIGH disables matching.
 */
374 static void pxp_set_olcolorkey(int layer_no, struct pxps *pxp)
376 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
377 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[layer_no];
379 /* Low and high are set equal. V4L does not allow a chromakey range */
380 if (ol_params->color_key_enable != 0 && ol_params->color_key != -1) {
381 __raw_writel(ol_params->color_key,
382 pxp->base + HW_PXP_AS_CLRKEYLOW);
383 __raw_writel(ol_params->color_key,
384 pxp->base + HW_PXP_AS_CLRKEYHIGH);
386 /* disable color key */
387 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_AS_CLRKEYLOW);
388 __raw_writel(0, pxp->base + HW_PXP_AS_CLRKEYHIGH);
/*
 * pxp_set_oln() - program the overlay (AS) buffer address, the AS window
 * in the output frame, and the AS pitch for layer @layer_no.
 * When combining is disabled the ULC is placed past the LRC so the PXP
 * fetches no AS data at all.
 */
392 static void pxp_set_oln(int layer_no, struct pxps *pxp)
394 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
395 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
396 dma_addr_t phys_addr = olparams_data->paddr;
397 __raw_writel(phys_addr, pxp->base + HW_PXP_AS_BUF);
399 if (olparams_data->combine_enable) {
400 __raw_writel(0x0, pxp->base + HW_PXP_OUT_AS_ULC);
401 __raw_writel(BF_PXP_OUT_AS_LRC_X(olparams_data->width - 1) |
402 BF_PXP_OUT_AS_LRC_Y(olparams_data->height - 1),
403 pxp->base + HW_PXP_OUT_AS_LRC);
405 /* will not fetch data from AS if ULC is at left top of LRC */
406 __raw_writel(0xFFFFFFFF, pxp->base + HW_PXP_OUT_AS_ULC);
407 __raw_writel(0x0, pxp->base + HW_PXP_OUT_AS_LRC);
/* Pitch in bytes: width << 2 for RGB24 (32-bit pixels), otherwise
 * width << 1 (16-bit formats assumed for the overlay). */
410 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
411 __raw_writel(olparams_data->width << 2,
412 pxp->base + HW_PXP_AS_PITCH);
414 __raw_writel(olparams_data->width << 1,
415 pxp->base + HW_PXP_AS_PITCH);
/*
 * pxp_set_olparam() - program AS_CTRL for overlay @layer_no: global alpha
 * value, pixel format (RGB888 or RGB565), alpha-override mode, and
 * color-key enable.
 */
418 static void pxp_set_olparam(int layer_no, struct pxps *pxp)
420 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
421 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
424 olparam = BF_PXP_AS_CTRL_ALPHA(olparams_data->global_alpha);
425 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
427 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB888);
430 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB565);
/* Override = ignore per-pixel alpha, use the global alpha for the layer */
431 if (olparams_data->global_alpha_enable)
433 BF_PXP_AS_CTRL_ALPHA_CTRL
434 (BV_PXP_AS_CTRL_ALPHA_CTRL__Override);
435 if (olparams_data->color_key_enable)
436 olparam |= BM_PXP_AS_CTRL_ENABLE_COLORKEY;
438 __raw_writel(olparam, pxp->base + HW_PXP_AS_CTRL);
/*
 * pxp_set_s0param() - place the processed-surface (PS) window inside the
 * output buffer: ULC from drect.left/top, LRC from left+width-1 /
 * top+height-1 (inclusive coordinates).
 */
441 static void pxp_set_s0param(struct pxps *pxp)
443 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
444 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
447 /* contains the coordinate for the PS in the OUTPUT buffer. */
448 s0param = BF_PXP_OUT_PS_ULC_X(proc_data->drect.left);
449 s0param |= BF_PXP_OUT_PS_ULC_Y(proc_data->drect.top);
450 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_ULC);
451 s0param = BF_PXP_OUT_PS_LRC_X(proc_data->drect.left +
452 proc_data->drect.width - 1);
453 s0param |= BF_PXP_OUT_PS_LRC_Y(proc_data->drect.top +
454 proc_data->drect.height - 1);
455 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_LRC);
458 /* crop behavior is re-designed in h/w. */
/* Intentional no-op: source clipping is achieved via the buffer offset
 * math in pxp_set_s0buf() and the PS window in pxp_set_s0param(). */
459 static void pxp_set_s0crop(struct pxps *pxp)
462 * place-holder, it's implemented in other functions in this driver.
463 * Refer to "Clipping source images" section in RM for detail.
/*
 * pxp_set_scaling() - program PS_SCALE from the source/destination rects.
 * Scale factors are fixed point with 0x1000 == 1.0 (so 0x10001000 means
 * 1:1 in both axes); factors are clamped to PXP_DOWNSCALE_THRESHOLD (4x
 * decimation). Also sets proc_data->scaling so pxp_set_ctrl() can enable
 * the scaler.
 */
467 static int pxp_set_scaling(struct pxps *pxp)
470 u32 xscale, yscale, s0scale;
471 struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
473 if ((proc_data->srect.width == proc_data->drect.width) &&
474 (proc_data->srect.height == proc_data->drect.height)) {
475 proc_data->scaling = 0;
476 __raw_writel(0x10001000, pxp->base + HW_PXP_PS_SCALE);
480 proc_data->scaling = 1;
481 xscale = proc_data->srect.width * 0x1000 / proc_data->drect.width;
482 yscale = proc_data->srect.height * 0x1000 / proc_data->drect.height;
483 if (xscale > PXP_DOWNSCALE_THRESHOLD)
484 xscale = PXP_DOWNSCALE_THRESHOLD;
485 if (yscale > PXP_DOWNSCALE_THRESHOLD)
486 yscale = PXP_DOWNSCALE_THRESHOLD;
487 s0scale = BF_PXP_PS_SCALE_YSCALE(yscale) |
488 BF_PXP_PS_SCALE_XSCALE(xscale);
489 __raw_writel(s0scale, pxp->base + HW_PXP_PS_SCALE);
/* pxp_set_bg() - program the background fill color shown where the PS
 * window does not cover the output frame. */
497 static void pxp_set_bg(struct pxps *pxp)
499 __raw_writel(pxp->pxp_conf_state.proc_data.bgcolor,
500 pxp->base + HW_PXP_PS_BACKGROUND);
/*
 * pxp_set_lut() - configure the PXP lookup table per
 * proc_data->lut_transform. Four fill modes are supported:
 *  INVERT|BLACK_WHITE -> inverted threshold (in < 0x80 maps to 0xFF),
 *  INVERT             -> bitwise-inverted 8-bit ramp,
 *  BLACK_WHITE        -> threshold (in < 0x80 maps to 0x00),
 *  USE_CMAP alone     -> raw colormap entries.
 * Each mode optionally routes the input through the user colormap first.
 * LUT data is streamed four 8-bit entries per 32-bit write; the LUT
 * address auto-increments. The programmed state is cached in
 * pxp->lut_state to skip redundant reprogramming.
 */
503 static void pxp_set_lut(struct pxps *pxp)
505 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
506 int lut_op = pxp_conf->proc_data.lut_transform;
509 bool use_cmap = (lut_op & PXP_LUT_USE_CMAP) ? true : false;
510 u8 *cmap = pxp_conf->proc_data.lut_map;
516 * If LUT already configured as needed, return...
517 * Unless CMAP is needed and it has been updated.
519 if ((pxp->lut_state == lut_op) &&
520 !(use_cmap && pxp_conf->proc_data.lut_map_updated))
523 if (lut_op == PXP_LUT_NONE) {
524 __raw_writel(BM_PXP_LUT_CTRL_BYPASS,
525 pxp->base + HW_PXP_LUT_CTRL);
526 } else if (((lut_op & PXP_LUT_INVERT) != 0)
527 && ((lut_op & PXP_LUT_BLACK_WHITE) != 0)) {
528 /* Fill out LUT table with inverted monochromized values */
530 /* clear bypass bit, set lookup mode & out mode */
531 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
532 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
533 BF_PXP_LUT_CTRL_OUT_MODE
534 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
535 pxp->base + HW_PXP_LUT_CTRL);
537 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
538 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
540 /* LUT address pointer auto-increments after each data write */
541 for (pix_val = 0; pix_val < 256; pix_val += 4) {
542 for (i = 0; i < 4; i++) {
543 entry_src = use_cmap ?
544 cmap[pix_val + i] : pix_val + i;
545 entry[i] = (entry_src < 0x80) ? 0xFF : 0x00;
547 reg_val = (entry[3] << 24) | (entry[2] << 16) |
548 (entry[1] << 8) | entry[0];
549 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
551 } else if ((lut_op & PXP_LUT_INVERT) != 0) {
552 /* Fill out LUT table with 8-bit inverted values */
554 /* clear bypass bit, set lookup mode & out mode */
555 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
556 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
557 BF_PXP_LUT_CTRL_OUT_MODE
558 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
559 pxp->base + HW_PXP_LUT_CTRL);
561 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
562 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
564 /* LUT address pointer auto-increments after each data write */
565 for (pix_val = 0; pix_val < 256; pix_val += 4) {
566 for (i = 0; i < 4; i++) {
567 entry_src = use_cmap ?
568 cmap[pix_val + i] : pix_val + i;
569 entry[i] = ~entry_src & 0xFF;
571 reg_val = (entry[3] << 24) | (entry[2] << 16) |
572 (entry[1] << 8) | entry[0];
573 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
575 } else if ((lut_op & PXP_LUT_BLACK_WHITE) != 0) {
576 /* Fill out LUT table with 8-bit monochromized values */
578 /* clear bypass bit, set lookup mode & out mode */
579 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
580 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
581 BF_PXP_LUT_CTRL_OUT_MODE
582 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
583 pxp->base + HW_PXP_LUT_CTRL);
585 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
586 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
588 /* LUT address pointer auto-increments after each data write */
589 for (pix_val = 0; pix_val < 256; pix_val += 4) {
590 for (i = 0; i < 4; i++) {
591 entry_src = use_cmap ?
592 cmap[pix_val + i] : pix_val + i;
593 entry[i] = (entry_src < 0x80) ? 0x00 : 0xFF;
595 reg_val = (entry[3] << 24) | (entry[2] << 16) |
596 (entry[1] << 8) | entry[0];
597 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
599 } else if (use_cmap) {
600 /* Fill out LUT table using colormap values */
602 /* clear bypass bit, set lookup mode & out mode */
603 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
604 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
605 BF_PXP_LUT_CTRL_OUT_MODE
606 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
607 pxp->base + HW_PXP_LUT_CTRL);
609 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
610 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
612 /* LUT address pointer auto-increments after each data write */
613 for (pix_val = 0; pix_val < 256; pix_val += 4) {
614 for (i = 0; i < 4; i++)
615 entry[i] = cmap[pix_val + i];
616 reg_val = (entry[3] << 24) | (entry[2] << 16) |
617 (entry[1] << 8) | entry[0];
618 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
/* Remember what we programmed so the next call can short-circuit. */
622 pxp->lut_state = lut_op;
/*
 * pxp_set_csc() - select and program the color-space converters based on
 * whether the S0 input and the output are YUV or RGB:
 *  YUV->YUV: both CSCs bypassed, unless RGB overlay combining forces a
 *            YUV->RGB (CSC1) then RGB->YUV (CSC2) round trip;
 *  YUV->RGB: CSC1 converts, CSC2 bypassed;
 *  RGB->YUV: CSC1 bypassed, CSC2 converts;
 *  RGB->RGB: both bypassed.
 * Coefficient values are fixed hardware tables; writing 0x40000000 to
 * CSC1_COEF0 sets its bypass bit, writing 0x1 to CSC2_CTRL bypasses CSC2.
 */
625 static void pxp_set_csc(struct pxps *pxp)
627 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
628 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
629 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[0];
630 struct pxp_layer_param *out_params = &pxp_conf->out_param;
632 bool input_is_YUV = is_yuv(s0_params->pixel_fmt);
633 bool output_is_YUV = is_yuv(out_params->pixel_fmt);
635 if (input_is_YUV && output_is_YUV) {
637 * Input = YUV, Output = YUV
638 * No CSC unless we need to do combining
640 if (ol_params->combine_enable) {
641 /* Must convert to RGB for combining with RGB overlay */
643 /* CSC1 - YUV->RGB */
644 __raw_writel(0x04030000, pxp->base + HW_PXP_CSC1_COEF0);
645 __raw_writel(0x01230208, pxp->base + HW_PXP_CSC1_COEF1);
646 __raw_writel(0x076b079c, pxp->base + HW_PXP_CSC1_COEF2);
648 /* CSC2 - RGB->YUV */
649 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
650 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
651 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
652 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
653 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
654 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
655 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
657 /* Input & Output both YUV, so bypass both CSCs */
660 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
663 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
665 } else if (input_is_YUV && !output_is_YUV) {
667 * Input = YUV, Output = RGB
668 * Use CSC1 to convert to RGB
671 /* CSC1 - YUV->RGB */
672 __raw_writel(0x84ab01f0, pxp->base + HW_PXP_CSC1_COEF0);
673 __raw_writel(0x01980204, pxp->base + HW_PXP_CSC1_COEF1);
674 __raw_writel(0x0730079c, pxp->base + HW_PXP_CSC1_COEF2);
677 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
678 } else if (!input_is_YUV && output_is_YUV) {
680 * Input = RGB, Output = YUV
681 * Use CSC2 to convert to YUV
685 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
687 /* CSC2 - RGB->YUV */
688 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
689 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
690 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
691 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
692 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
693 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
694 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
697 * Input = RGB, Output = RGB
698 * Input & Output both RGB, so bypass both CSCs
702 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
705 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
708 /* YCrCb colorspace */
709 /* Not sure when we use this...no YCrCb formats are defined for PxP */
711 __raw_writel(0x84ab01f0, HW_PXP_CSCCOEFF0_ADDR);
712 __raw_writel(0x01230204, HW_PXP_CSCCOEFF1_ADDR);
713 __raw_writel(0x0730079c, HW_PXP_CSCCOEFF2_ADDR);
/*
 * pxp_set_s0buf() - program the S0 (PS) Y/U/V buffer addresses and pitch.
 * Cropping is implemented by offsetting the base address into the frame
 * using srect.top/left and the per-format bytes-per-pixel. For planar
 * YUV/grayscale input the U and V planes are located immediately after
 * the Y plane (chroma offsets use quarter-size 4:2:0 math here).
 */
718 static void pxp_set_s0buf(struct pxps *pxp)
720 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
721 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
722 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
724 dma_addr_t Y1, U1, V1;
727 Y = s0_params->paddr;
729 if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB565)
731 else if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB24)
733 offset = (proc_data->srect.top * s0_params->width +
734 proc_data->srect.left) * bpp;
735 /* clipping or cropping */
737 __raw_writel(Y1, pxp->base + HW_PXP_PS_BUF);
738 if ((s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P) ||
739 (s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P) ||
740 (s0_params->pixel_fmt == PXP_PIX_FMT_GREY)) {
741 /* Set to 1 if YUV format is 4:2:2 rather than 4:2:0 */
744 offset = proc_data->srect.top * s0_params->width / 4 +
745 proc_data->srect.left / 2;
746 U = Y + (s0_params->width * s0_params->height);
748 V = U + ((s0_params->width * s0_params->height) >> s);
750 __raw_writel(U1, pxp->base + HW_PXP_PS_UBUF);
751 __raw_writel(V1, pxp->base + HW_PXP_PS_VBUF);
754 /* TODO: only support RGB565, Y8, Y4, YUV420 */
755 if (s0_params->pixel_fmt == PXP_PIX_FMT_GREY ||
756 s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P)
757 __raw_writel(s0_params->width, pxp->base + HW_PXP_PS_PITCH);
758 else if (s0_params->pixel_fmt == PXP_PIX_FMT_GY04)
759 __raw_writel(s0_params->width >> 1,
760 pxp->base + HW_PXP_PS_PITCH);
762 __raw_writel(s0_params->width * 2, pxp->base + HW_PXP_PS_PITCH);
766 * pxp_config() - configure PxP for a processing task
767 * @pxps: PXP context.
768 * @pxp_chan: PXP channel.
769 * @return: 0 on success or negative error code on failure.
/* Drives all the per-layer register setup helpers above in sequence:
 * PS window, scaling, overlay params/color keys (one per overlay layer,
 * where layer_nr counts S0 + OUT + overlays), then the S0 color key. */
771 static int pxp_config(struct pxps *pxp, struct pxp_channel *pxp_chan)
773 struct pxp_config_data *pxp_conf_data = &pxp->pxp_conf_state;
777 /* Configure PxP regs */
779 pxp_set_s0param(pxp);
781 pxp_set_scaling(pxp);
782 ol_nr = pxp_conf_data->layer_nr - 2;
784 i = pxp_conf_data->layer_nr - 2 - ol_nr;
786 pxp_set_olparam(i, pxp);
787 /* only the color key in higher overlay will take effect. */
788 pxp_set_olcolorkey(i, pxp);
791 pxp_set_s0colorkey(pxp);
/*
 * pxp_clk_enable() - ungate the PXP clock if it is not already on.
 * clk_mutex guards the clk_stat check-and-set so enable/disable cannot
 * race each other.
 */
802 static void pxp_clk_enable(struct pxps *pxp)
804 mutex_lock(&pxp->clk_mutex);
806 if (pxp->clk_stat == CLK_STAT_ON) {
807 mutex_unlock(&pxp->clk_mutex);
811 clk_prepare_enable(pxp->clk);
812 pxp->clk_stat = CLK_STAT_ON;
814 mutex_unlock(&pxp->clk_mutex);
/*
 * pxp_clk_disable() - gate the PXP clock, but only when no operation is
 * in flight (pxp_ongoing) and no channel has queued work (global head
 * list). The spinlock snapshot of that condition is taken under
 * pxp->lock; clk_mutex serializes against pxp_clk_enable().
 */
817 static void pxp_clk_disable(struct pxps *pxp)
821 mutex_lock(&pxp->clk_mutex);
823 if (pxp->clk_stat == CLK_STAT_OFF) {
824 mutex_unlock(&pxp->clk_mutex);
828 spin_lock_irqsave(&pxp->lock, flags);
829 if ((pxp->pxp_ongoing == 0) && list_empty(&head)) {
830 spin_unlock_irqrestore(&pxp->lock, flags);
831 clk_disable_unprepare(pxp->clk);
832 pxp->clk_stat = CLK_STAT_OFF;
834 spin_unlock_irqrestore(&pxp->lock, flags);
836 mutex_unlock(&pxp->clk_mutex);
/* Workqueue callback scheduled by pxp_clkoff_timer(); runs
 * pxp_clk_disable() in process context (it takes a mutex). */
839 static inline void clkoff_callback(struct work_struct *w)
841 struct pxps *pxp = container_of(w, struct pxps, work);
843 pxp_clk_disable(pxp);
/*
 * pxp_clkoff_timer() - periodic idle check. If the PXP is idle and no
 * work is queued, schedule the clock-off work; otherwise re-arm the
 * timer for another timeout_in_ms interval.
 */
846 static void pxp_clkoff_timer(unsigned long arg)
848 struct pxps *pxp = (struct pxps *)arg;
850 if ((pxp->pxp_ongoing == 0) && list_empty(&head))
851 schedule_work(&pxp->work);
853 mod_timer(&pxp->clk_timer,
854 jiffies + msecs_to_jiffies(timeout_in_ms));
/* First descriptor on the channel's active list; caller must know the
 * list is non-empty and hold the appropriate lock. */
857 static struct pxp_tx_desc *pxpdma_first_active(struct pxp_channel *pxp_chan)
859 return list_entry(pxp_chan->active_list.next, struct pxp_tx_desc, list);
/* First descriptor on the channel's submitted-but-not-started queue. */
862 static struct pxp_tx_desc *pxpdma_first_queued(struct pxp_channel *pxp_chan)
864 return list_entry(pxp_chan->queue.next, struct pxp_tx_desc, list);
867 /* called with pxp_chan->lock held */
/*
 * __pxpdma_dostart() - snapshot the active transaction's configuration
 * into pxp->pxp_conf_state. The first (parent) descriptor carries the
 * S0 layer and proc_data; the first child on tx_list is the output
 * layer and any further children are overlay layers.
 */
868 static void __pxpdma_dostart(struct pxp_channel *pxp_chan)
870 struct pxp_dma *pxp_dma = to_pxp_dma(pxp_chan->dma_chan.device);
871 struct pxps *pxp = to_pxp(pxp_dma);
872 struct pxp_tx_desc *desc;
873 struct pxp_tx_desc *child;
876 /* so far we presume only one transaction on active_list */
878 desc = pxpdma_first_active(pxp_chan);
879 memcpy(&pxp->pxp_conf_state.s0_param,
880 &desc->layer_param.s0_param, sizeof(struct pxp_layer_param));
881 memcpy(&pxp->pxp_conf_state.proc_data,
882 &desc->proc_data, sizeof(struct pxp_proc_data));
884 /* Save PxP configuration */
885 list_for_each_entry(child, &desc->tx_list, list) {
886 if (i == 0) { /* Output */
887 memcpy(&pxp->pxp_conf_state.out_param,
888 &child->layer_param.out_param,
889 sizeof(struct pxp_layer_param));
890 } else { /* Overlay */
891 memcpy(&pxp->pxp_conf_state.ol_param[i - 1],
892 &child->layer_param.ol_param,
893 sizeof(struct pxp_layer_param));
898 pr_debug("%s:%d S0 w/h %d/%d paddr %08x\n", __func__, __LINE__,
899 pxp->pxp_conf_state.s0_param.width,
900 pxp->pxp_conf_state.s0_param.height,
901 pxp->pxp_conf_state.s0_param.paddr);
902 pr_debug("%s:%d OUT w/h %d/%d paddr %08x\n", __func__, __LINE__,
903 pxp->pxp_conf_state.out_param.width,
904 pxp->pxp_conf_state.out_param.height,
905 pxp->pxp_conf_state.out_param.paddr);
/*
 * pxpdma_dostart_work() - start the next pending transaction: busy-wait
 * for the previous operation's ENABLE bit to clear, pick the first
 * channel off the global pending list, copy its active descriptor's
 * configuration, and program the hardware via pxp_config().
 */
908 static void pxpdma_dostart_work(struct pxps *pxp)
910 struct pxp_channel *pxp_chan = NULL;
911 unsigned long flags, flags1;
/* Spin until the previous PXP operation has finished. */
913 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
916 spin_lock_irqsave(&pxp->lock, flags);
917 if (list_empty(&head)) {
918 pxp->pxp_ongoing = 0;
919 spin_unlock_irqrestore(&pxp->lock, flags);
923 pxp_chan = list_entry(head.next, struct pxp_channel, list);
925 spin_lock_irqsave(&pxp_chan->lock, flags1);
926 if (!list_empty(&pxp_chan->active_list)) {
927 struct pxp_tx_desc *desc;
929 desc = pxpdma_first_active(pxp_chan);
930 __pxpdma_dostart(pxp_chan);
932 spin_unlock_irqrestore(&pxp_chan->lock, flags1);
935 pxp_config(pxp, pxp_chan);
939 spin_unlock_irqrestore(&pxp->lock, flags);
/* Move every queued descriptor from pxp_chan->queue onto @list (the
 * caller's, typically the channel's active list). */
942 static void pxpdma_dequeue(struct pxp_channel *pxp_chan, struct list_head *list)
944 struct pxp_tx_desc *desc = NULL;
946 desc = pxpdma_first_queued(pxp_chan);
947 list_move_tail(&desc->list, list);
948 } while (!list_empty(&pxp_chan->queue));
/*
 * pxp_tx_submit() - dmaengine .tx_submit hook: assign the next cookie to
 * this descriptor and append it to the channel's submit queue. Returns
 * the cookie; actual hardware start happens later (issue_pending path).
 */
951 static dma_cookie_t pxp_tx_submit(struct dma_async_tx_descriptor *tx)
953 struct pxp_tx_desc *desc = to_tx_desc(tx);
954 struct pxp_channel *pxp_chan = to_pxp_channel(tx->chan);
958 dev_dbg(&pxp_chan->dma_chan.dev->device, "received TX\n");
960 mutex_lock(&pxp_chan->chan_mutex);
962 cookie = pxp_chan->dma_chan.cookie;
967 /* from dmaengine.h: "last cookie value returned to client" */
968 pxp_chan->dma_chan.cookie = cookie;
971 /* pxp_chan->lock can be taken under ichan->lock, but not v.v. */
972 spin_lock_irqsave(&pxp_chan->lock, flags);
974 /* Here we add the tx descriptor to our PxP task queue. */
975 list_add_tail(&desc->list, &pxp_chan->queue);
977 spin_unlock_irqrestore(&pxp_chan->lock, flags);
979 dev_dbg(&pxp_chan->dma_chan.dev->device, "done TX\n");
981 mutex_unlock(&pxp_chan->chan_mutex);
985 /* Called with pxp_chan->chan_mutex held */
/*
 * pxp_desc_alloc() - allocate @n transfer descriptors for the channel in
 * one vmalloc block, initialize each one's dmaengine descriptor with
 * pxp_tx_submit as the submit hook, and place them all on the free list.
 */
986 static int pxp_desc_alloc(struct pxp_channel *pxp_chan, int n)
988 struct pxp_tx_desc *desc = vmalloc(n * sizeof(struct pxp_tx_desc));
993 pxp_chan->n_tx_desc = n;
994 pxp_chan->desc = desc;
995 INIT_LIST_HEAD(&pxp_chan->active_list);
996 INIT_LIST_HEAD(&pxp_chan->queue);
997 INIT_LIST_HEAD(&pxp_chan->free_list);
1000 struct dma_async_tx_descriptor *txd = &desc->txd;
1002 memset(txd, 0, sizeof(*txd));
1003 INIT_LIST_HEAD(&desc->tx_list);
1004 dma_async_tx_descriptor_init(txd, &pxp_chan->dma_chan);
1005 txd->tx_submit = pxp_tx_submit;
1007 list_add(&desc->list, &pxp_chan->free_list);
1016 * pxp_init_channel() - initialize a PXP channel.
1017 * @pxp_dma: PXP DMA context.
1018 * @pchan: pointer to the channel object.
1019 * @return 0 on success or negative error code on failure.
1021 static int pxp_init_channel(struct pxp_dma *pxp_dma,
1022 struct pxp_channel *pxp_chan)
1024 unsigned long flags;
1025 struct pxps *pxp = to_pxp(pxp_dma);
1026 int ret = 0, n_desc = 0;
1029 * We are using _virtual_ channel here.
1030 * Each channel contains all parameters of corresponding layers
1031 * for one transaction; each layer is represented as one descriptor
1032 * (i.e., pxp_tx_desc) here.
1035 spin_lock_irqsave(&pxp->lock, flags);
1037 /* max desc nr: S0+OL+OUT = 1+8+1 */
1040 spin_unlock_irqrestore(&pxp->lock, flags);
/* Allocate the descriptor pool lazily, only on first init. */
1042 if (n_desc && !pxp_chan->desc)
1043 ret = pxp_desc_alloc(pxp_chan, n_desc);
1049 * pxp_uninit_channel() - uninitialize a PXP channel.
1050 * @pxp_dma: PXP DMA context.
1051 * @pchan: pointer to the channel object.
1052 * @return 0 on success or negative error code on failure.
1054 static int pxp_uninit_channel(struct pxp_dma *pxp_dma,
1055 struct pxp_channel *pxp_chan)
/* Release the vmalloc'ed descriptor pool from pxp_desc_alloc(). */
1060 vfree(pxp_chan->desc);
1062 pxp_chan->desc = NULL;
/*
 * pxp_irq() - completion interrupt handler. Captures the histogram
 * status, acknowledges the IRQ, completes the first active descriptor of
 * the first pending channel (invoking the client callback if requested),
 * recycles its descriptors onto the free list, wakes waiters and re-arms
 * the clock-off timer.
 */
1067 static irqreturn_t pxp_irq(int irq, void *dev_id)
1069 struct pxps *pxp = dev_id;
1070 struct pxp_channel *pxp_chan;
1071 struct pxp_tx_desc *desc;
1072 dma_async_tx_callback callback;
1073 void *callback_param;
1074 unsigned long flags;
/* Latch histogram result before acknowledging the interrupt. */
1080 __raw_readl(pxp->base + HW_PXP_HIST_CTRL) & BM_PXP_HIST_CTRL_STATUS;
1082 __raw_writel(BM_PXP_STAT_IRQ, pxp->base + HW_PXP_STAT_CLR);
1084 spin_lock_irqsave(&pxp->lock, flags);
1086 if (list_empty(&head)) {
1087 pxp->pxp_ongoing = 0;
1088 spin_unlock_irqrestore(&pxp->lock, flags);
1092 pxp_chan = list_entry(head.next, struct pxp_channel, list);
1093 list_del_init(&pxp_chan->list);
1095 if (list_empty(&pxp_chan->active_list)) {
1096 pr_debug("PXP_IRQ pxp_chan->active_list empty. chan_id %d\n",
1097 pxp_chan->dma_chan.chan_id);
1098 pxp->pxp_ongoing = 0;
1099 spin_unlock_irqrestore(&pxp->lock, flags);
1103 /* Get descriptor and call callback */
1104 desc = pxpdma_first_active(pxp_chan);
1106 pxp_chan->completed = desc->txd.cookie;
1108 callback = desc->txd.callback;
1109 callback_param = desc->txd.callback_param;
1111 /* Send histogram status back to caller */
1112 desc->hist_status = hist_status;
1114 if ((desc->txd.flags & DMA_PREP_INTERRUPT) && callback)
1115 callback(callback_param);
1117 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/* Return parent and all child descriptors to the free pool. */
1119 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1120 list_move(&desc->list, &pxp_chan->free_list);
1122 wake_up(&pxp->done);
1123 pxp->pxp_ongoing = 0;
1124 mod_timer(&pxp->clk_timer, jiffies + msecs_to_jiffies(timeout_in_ms));
1126 spin_unlock_irqrestore(&pxp->lock, flags);
1131 /* called with pxp_chan->lock held */
/*
 * pxpdma_desc_get() - detach the first descriptor from the channel's
 * free list. 'ret' starts NULL, so an empty free list yields NULL.
 * Caller must hold pxp_chan->lock (see comment above).
 */
1132 static struct pxp_tx_desc *pxpdma_desc_get(struct pxp_channel *pxp_chan)
1134 struct pxp_tx_desc *desc, *_desc;
1135 struct pxp_tx_desc *ret = NULL;
1137 list_for_each_entry_safe(desc, _desc, &pxp_chan->free_list, list) {
1138 list_del_init(&desc->list);
1146 /* called with pxp_chan->lock held */
/*
 * pxpdma_desc_put() - return a descriptor (and its whole tx_list chain)
 * to the channel's free list. Caller must hold pxp_chan->lock.
 * NOTE(review): dev_info() here logs on every put — looks like leftover
 * debug output; dev_dbg() would be the usual level. Confirm intent.
 */
1147 static void pxpdma_desc_put(struct pxp_channel *pxp_chan,
1148 struct pxp_tx_desc *desc)
1151 struct device *dev = &pxp_chan->dma_chan.dev->device;
1152 struct pxp_tx_desc *child;
1154 list_for_each_entry(child, &desc->tx_list, list)
1155 dev_info(dev, "moving child desc %p to freelist\n", child);
/* Children first, then the head descriptor itself. */
1156 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1157 dev_info(dev, "moving desc %p to freelist\n", desc);
1158 list_add(&desc->list, &pxp_chan->free_list);
1162 /* Allocate and initialise a transfer descriptor. */
/*
 * pxp_prep_slave_sg() - build a transfer from a scatterlist.
 * Each sg entry maps to one pxp_tx_desc; the first entry programs the
 * S0 (source) layer, later entries program output/overlay layers
 * (branch conditions elided in this view — presumably first = S0,
 * last = OUT, middle = OL; confirm against full source).
 * Requires at least 2 entries (S0 + OUT). On descriptor exhaustion the
 * partial chain is returned to the free list and NULL-ish failure taken.
 */
1163 static struct dma_async_tx_descriptor *pxp_prep_slave_sg(struct dma_chan *chan,
1166 unsigned int sg_len,
1168 dma_transfer_direction
1170 unsigned long tx_flags,
1173 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1174 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1175 struct pxps *pxp = to_pxp(pxp_dma);
1176 struct pxp_tx_desc *desc = NULL;
1177 struct pxp_tx_desc *first = NULL, *prev = NULL;
1178 struct scatterlist *sg;
1179 unsigned long flags;
1180 dma_addr_t phys_addr;
/* Only slave directions make sense for PxP. */
1183 if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV) {
1184 dev_err(chan->device->dev, "Invalid DMA direction %d!\n",
/* Need at least source + output layers. */
1189 if (unlikely(sg_len < 2))
1192 spin_lock_irqsave(&pxp_chan->lock, flags);
1193 for_each_sg(sgl, sg, sg_len, i) {
1194 desc = pxpdma_desc_get(pxp_chan);
/* Out of descriptors: undo what we chained so far. */
1196 pxpdma_desc_put(pxp_chan, first);
1197 dev_err(chan->device->dev, "Can't get DMA desc.\n");
1198 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1202 phys_addr = sg_dma_address(sg);
1207 desc->layer_param.s0_param.paddr = phys_addr;
/* Non-first descriptors hang off the first one's tx_list. */
1209 list_add_tail(&desc->list, &first->tx_list);
1214 desc->layer_param.out_param.paddr = phys_addr;
1216 desc->layer_param.ol_param.paddr = phys_addr;
1221 spin_unlock_irqrestore(&pxp_chan->lock, flags);
/* Record the layer count for the upcoming hardware configuration. */
1223 pxp->pxp_conf_state.layer_nr = sg_len;
1224 first->txd.flags = tx_flags;
1225 first->len = sg_len;
1226 pr_debug("%s:%d first %p, first->len %d, flags %08x\n",
1227 __func__, __LINE__, first, first->len, first->txd.flags);
/*
 * pxp_issue_pending() - dmaengine hook: kick off queued work.
 * Moves queued descriptors to the active list and enqueues the channel
 * on the global 'head' list (lock order: pxp->lock, then chan->lock).
 * Then waits (up to 2s) for any in-flight operation to drain, flags the
 * engine busy and starts the hardware via pxpdma_dostart_work().
 */
1232 static void pxp_issue_pending(struct dma_chan *chan)
1234 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1235 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1236 struct pxps *pxp = to_pxp(pxp_dma);
1237 unsigned long flags0, flags;
/* Device lock first, then channel lock — keep this order everywhere. */
1239 spin_lock_irqsave(&pxp->lock, flags0);
1240 spin_lock_irqsave(&pxp_chan->lock, flags);
1242 if (!list_empty(&pxp_chan->queue)) {
1243 pxpdma_dequeue(pxp_chan, &pxp_chan->active_list);
1244 pxp_chan->status = PXP_CHANNEL_READY;
1245 list_add_tail(&pxp_chan->list, &head);
1247 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1248 spin_unlock_irqrestore(&pxp->lock, flags0);
/* Nothing queued: drop the locks and bail (body elided in this view). */
1251 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1252 spin_unlock_irqrestore(&pxp->lock, flags0);
1254 pxp_clk_enable(pxp);
/* Give up on timeout or pending signal; clock is re-disabled then. */
1255 if (!wait_event_interruptible_timeout(pxp->done, PXP_WAITCON, 2 * HZ) ||
1256 signal_pending(current)) {
1257 pxp_clk_disable(pxp);
1261 spin_lock_irqsave(&pxp->lock, flags);
1262 pxp->pxp_ongoing = 1;
1263 spin_unlock_irqrestore(&pxp->lock, flags);
1264 pxpdma_dostart_work(pxp);
/*
 * __pxp_terminate_all() - abort all queued and active transfers on a
 * channel, recycling their descriptors to the free list, and return
 * the channel to INITIALIZED. Caller holds pxp_chan->chan_mutex.
 */
1267 static void __pxp_terminate_all(struct dma_chan *chan)
1269 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1270 unsigned long flags;
1272 /* pchan->queue is modified in ISR, have to spinlock */
1273 spin_lock_irqsave(&pxp_chan->lock, flags);
1274 list_splice_init(&pxp_chan->queue, &pxp_chan->free_list);
1275 list_splice_init(&pxp_chan->active_list, &pxp_chan->free_list);
1277 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1279 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/*
 * pxp_control() - dmaengine device_control hook.
 * Only DMA_TERMINATE_ALL is implemented; anything else is rejected
 * (return value elided in this view — presumably -ENOSYS/-EINVAL).
 */
1282 static int pxp_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
1285 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1287 /* Only supports DMA_TERMINATE_ALL */
1288 if (cmd != DMA_TERMINATE_ALL)
/* Serialize against alloc/free of channel resources. */
1291 mutex_lock(&pxp_chan->chan_mutex);
1292 __pxp_terminate_all(chan);
1293 mutex_unlock(&pxp_chan->chan_mutex);
/*
 * pxp_alloc_chan_resources() - dmaengine hook: claim a channel.
 * Initializes the (virtual) channel's descriptor pool and marks it
 * INITIALIZED. 'completed' starts at -ENXIO meaning "nothing done yet".
 */
1298 static int pxp_alloc_chan_resources(struct dma_chan *chan)
1300 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1301 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1304 /* dmaengine.c now guarantees to only offer free channels */
1305 BUG_ON(chan->client_count > 1);
1306 WARN_ON(pxp_chan->status != PXP_CHANNEL_FREE);
1309 pxp_chan->completed = -ENXIO;
1311 pr_debug("%s dma_chan.chan_id %d\n", __func__, chan->chan_id);
1312 ret = pxp_init_channel(pxp_dma, pxp_chan);
1316 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1318 dev_dbg(&chan->dev->device, "Found channel 0x%x, irq %d\n",
1319 chan->chan_id, pxp_chan->eof_irq);
/*
 * pxp_free_chan_resources() - dmaengine hook: release a channel.
 * Terminates all pending work, marks the channel FREE and frees its
 * descriptor pool, all under the channel mutex.
 */
1327 static void pxp_free_chan_resources(struct dma_chan *chan)
1329 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1330 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1332 mutex_lock(&pxp_chan->chan_mutex);
1334 __pxp_terminate_all(chan);
1336 pxp_chan->status = PXP_CHANNEL_FREE;
1338 pxp_uninit_channel(pxp_dma, pxp_chan);
1340 mutex_unlock(&pxp_chan->chan_mutex);
/*
 * pxp_tx_status() - dmaengine hook: report transfer status.
 * Compares the queried cookie with the channel's current cookie
 * (non-matching path elided in this view) and fills in txstate with
 * the last completed cookie; residue is always 0 (no partial progress
 * reporting on this hardware).
 */
1343 static enum dma_status pxp_tx_status(struct dma_chan *chan,
1344 dma_cookie_t cookie,
1345 struct dma_tx_state *txstate)
1347 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1349 if (cookie != chan->cookie)
1353 txstate->last = pxp_chan->completed;
1354 txstate->used = chan->cookie;
1355 txstate->residue = 0;
/*
 * pxp_hw_init() - one-time hardware and software-state initialization.
 * Releases the block from reset, zeroes the cached configuration
 * (proc_data, S0/OL/OUT channel parameters), writes the default layer
 * setup to the registers, and programs the fixed histogram comparison
 * tables (2/4/8/16-level) used for E-Ink waveform selection.
 * Caller must have the PxP clock enabled (see pxp_probe()).
 */
1360 static int pxp_hw_init(struct pxps *pxp)
1362 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
1363 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
1366 /* Pull PxP out of reset */
1367 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1369 /* Config defaults */
1371 /* Initialize non-channel-specific PxP parameters */
1372 proc_data->drect.left = proc_data->srect.left = 0;
1373 proc_data->drect.top = proc_data->srect.top = 0;
1374 proc_data->drect.width = proc_data->srect.width = 0;
1375 proc_data->drect.height = proc_data->srect.height = 0;
1376 proc_data->scaling = 0;
1377 proc_data->hflip = 0;
1378 proc_data->vflip = 0;
1379 proc_data->rotate = 0;
1380 proc_data->bgcolor = 0;
1382 /* Initialize S0 channel parameters */
1383 pxp_conf->s0_param.pixel_fmt = pxp_s0_formats[0];
1384 pxp_conf->s0_param.width = 0;
1385 pxp_conf->s0_param.height = 0;
/* -1 == no color key (all bits set). */
1386 pxp_conf->s0_param.color_key = -1;
1387 pxp_conf->s0_param.color_key_enable = false;
1389 /* Initialize OL channel parameters */
1390 pxp_conf->ol_param[0].combine_enable = false;
1391 pxp_conf->ol_param[0].width = 0;
1392 pxp_conf->ol_param[0].height = 0;
1393 pxp_conf->ol_param[0].pixel_fmt = PXP_PIX_FMT_RGB565;
1394 pxp_conf->ol_param[0].color_key_enable = false;
1395 pxp_conf->ol_param[0].color_key = -1;
1396 pxp_conf->ol_param[0].global_alpha_enable = false;
1397 pxp_conf->ol_param[0].global_alpha = 0;
1398 pxp_conf->ol_param[0].local_alpha_enable = false;
1400 /* Initialize Output channel parameters */
1401 pxp_conf->out_param.width = 0;
1402 pxp_conf->out_param.height = 0;
1403 pxp_conf->out_param.pixel_fmt = PXP_PIX_FMT_RGB565;
1405 proc_data->overlay_state = 0;
1407 /* Write default h/w config */
1409 pxp_set_s0param(pxp);
1410 pxp_set_s0crop(pxp);
1412 * simply program the ULC to a higher value than the LRC
1413 * to avoid any AS pixels to show up in the output buffer.
1415 __raw_writel(0xFFFFFFFF, pxp->base + HW_PXP_OUT_AS_ULC);
1416 pxp_set_olparam(0, pxp);
1417 pxp_set_olcolorkey(0, pxp);
1419 pxp_set_s0colorkey(pxp);
1424 /* One-time histogram configuration */
1426 BF_PXP_HIST_CTRL_PANEL_MODE(BV_PXP_HIST_CTRL_PANEL_MODE__GRAY16);
1427 __raw_writel(reg_val, pxp->base + HW_PXP_HIST_CTRL);
/* 2-level histogram thresholds. */
1429 reg_val = BF_PXP_HIST2_PARAM_VALUE0(0x00) |
1430 BF_PXP_HIST2_PARAM_VALUE1(0x00F);
1431 __raw_writel(reg_val, pxp->base + HW_PXP_HIST2_PARAM);
/* 4-level histogram thresholds. */
1433 reg_val = BF_PXP_HIST4_PARAM_VALUE0(0x00) |
1434 BF_PXP_HIST4_PARAM_VALUE1(0x05) |
1435 BF_PXP_HIST4_PARAM_VALUE2(0x0A) | BF_PXP_HIST4_PARAM_VALUE3(0x0F);
1436 __raw_writel(reg_val, pxp->base + HW_PXP_HIST4_PARAM);
/* 8-level histogram thresholds (two registers). */
1438 reg_val = BF_PXP_HIST8_PARAM0_VALUE0(0x00) |
1439 BF_PXP_HIST8_PARAM0_VALUE1(0x02) |
1440 BF_PXP_HIST8_PARAM0_VALUE2(0x04) | BF_PXP_HIST8_PARAM0_VALUE3(0x06);
1441 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM0);
1442 reg_val = BF_PXP_HIST8_PARAM1_VALUE4(0x09) |
1443 BF_PXP_HIST8_PARAM1_VALUE5(0x0B) |
1444 BF_PXP_HIST8_PARAM1_VALUE6(0x0D) | BF_PXP_HIST8_PARAM1_VALUE7(0x0F);
1445 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM1);
/* 16-level histogram thresholds (four registers, values 0x0-0xF). */
1447 reg_val = BF_PXP_HIST16_PARAM0_VALUE0(0x00) |
1448 BF_PXP_HIST16_PARAM0_VALUE1(0x01) |
1449 BF_PXP_HIST16_PARAM0_VALUE2(0x02) |
1450 BF_PXP_HIST16_PARAM0_VALUE3(0x03);
1451 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM0);
1452 reg_val = BF_PXP_HIST16_PARAM1_VALUE4(0x04) |
1453 BF_PXP_HIST16_PARAM1_VALUE5(0x05) |
1454 BF_PXP_HIST16_PARAM1_VALUE6(0x06) |
1455 BF_PXP_HIST16_PARAM1_VALUE7(0x07);
1456 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM1);
1457 reg_val = BF_PXP_HIST16_PARAM2_VALUE8(0x08) |
1458 BF_PXP_HIST16_PARAM2_VALUE9(0x09) |
1459 BF_PXP_HIST16_PARAM2_VALUE10(0x0A) |
1460 BF_PXP_HIST16_PARAM2_VALUE11(0x0B);
1461 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM2);
1462 reg_val = BF_PXP_HIST16_PARAM3_VALUE12(0x0C) |
1463 BF_PXP_HIST16_PARAM3_VALUE13(0x0D) |
1464 BF_PXP_HIST16_PARAM3_VALUE14(0x0E) |
1465 BF_PXP_HIST16_PARAM3_VALUE15(0x0F);
1466 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM3);
/*
 * pxp_dma_init() - register the PxP as a dmaengine provider.
 * Fills in the dma_device callbacks, initializes all virtual channels
 * (shared EOF IRQ, per-channel lock/mutex), and registers the device.
 * Returns the result of dma_async_device_register().
 */
1471 static int pxp_dma_init(struct pxps *pxp)
1473 struct pxp_dma *pxp_dma = &pxp->pxp_dma;
1474 struct dma_device *dma = &pxp_dma->dma;
1477 dma_cap_set(DMA_SLAVE, dma->cap_mask);
1478 dma_cap_set(DMA_PRIVATE, dma->cap_mask);
1480 /* Compulsory common fields */
1481 dma->dev = pxp->dev;
1482 dma->device_alloc_chan_resources = pxp_alloc_chan_resources;
1483 dma->device_free_chan_resources = pxp_free_chan_resources;
1484 dma->device_tx_status = pxp_tx_status;
1485 dma->device_issue_pending = pxp_issue_pending;
1487 /* Compulsory for DMA_SLAVE fields */
1488 dma->device_prep_slave_sg = pxp_prep_slave_sg;
1489 dma->device_control = pxp_control;
1491 /* Initialize PxP Channels */
1492 INIT_LIST_HEAD(&dma->channels);
1493 for (i = 0; i < NR_PXP_VIRT_CHANNEL; i++) {
1494 struct pxp_channel *pxp_chan = pxp->channel + i;
1495 struct dma_chan *dma_chan = &pxp_chan->dma_chan;
1497 spin_lock_init(&pxp_chan->lock);
1498 mutex_init(&pxp_chan->chan_mutex);
1500 /* Only one EOF IRQ for PxP, shared by all channels */
1501 pxp_chan->eof_irq = pxp->irq;
1502 pxp_chan->status = PXP_CHANNEL_FREE;
1503 pxp_chan->completed = -ENXIO;
1504 snprintf(pxp_chan->eof_name, sizeof(pxp_chan->eof_name),
/* Cookie 1 means "no transfer issued yet". */
1507 dma_chan->device = &pxp_dma->dma;
1508 dma_chan->cookie = 1;
1509 dma_chan->chan_id = i;
1510 list_add_tail(&dma_chan->device_node, &dma->channels);
1513 return dma_async_device_register(&pxp_dma->dma);
/* sysfs read: report the clock-off idle timeout in milliseconds. */
1516 static ssize_t clk_off_timeout_show(struct device *dev,
1517 struct device_attribute *attr, char *buf)
1519 return sprintf(buf, "%d\n", timeout_in_ms);
/*
 * sysfs write: update the clock-off idle timeout (ms). Input that
 * fails to parse as an integer is silently ignored.
 */
1522 static ssize_t clk_off_timeout_store(struct device *dev,
1523 struct device_attribute *attr,
1524 const char *buf, size_t count)
1527 if (sscanf(buf, "%d", &val) > 0) {
1528 timeout_in_ms = val;
/* /sys/devices/.../clk_off_timeout — tunable idle-clock-gating delay. */
1534 static DEVICE_ATTR(clk_off_timeout, 0644, clk_off_timeout_show,
1535 clk_off_timeout_store);
/* Device-tree match table (i.MX6DL and compatibles). */
1537 static const struct of_device_id imx_pxpdma_dt_ids[] = {
1538 { .compatible = "fsl,imx6dl-pxp-dma", },
1541 MODULE_DEVICE_TABLE(of, imx_pxpdma_dt_ids);
/*
 * pxp_probe() - platform driver probe.
 * Maps registers, enables the clock just long enough to run
 * pxp_hw_init(), requests the shared PxP IRQ, registers the dmaengine
 * provider, exposes the clk_off_timeout sysfs knob, and sets up the
 * clock-off workqueue/timer pair. Uses devm_* so most resources are
 * released automatically on failure.
 */
1543 static int pxp_probe(struct platform_device *pdev)
1546 struct resource *res;
1550 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1551 irq = platform_get_irq(pdev, 0);
1552 if (!res || irq < 0) {
1557 pxp = devm_kzalloc(&pdev->dev, sizeof(*pxp), GFP_KERNEL);
1559 dev_err(&pdev->dev, "failed to allocate control object\n");
1564 pxp->dev = &pdev->dev;
1566 platform_set_drvdata(pdev, pxp);
1569 pxp->pxp_ongoing = 0;
1572 spin_lock_init(&pxp->lock);
1573 mutex_init(&pxp->clk_mutex);
1575 pxp->base = devm_request_and_ioremap(&pdev->dev, res);
1576 if (pxp->base == NULL) {
1577 dev_err(&pdev->dev, "Couldn't ioremap regs\n");
1584 pxp->clk = devm_clk_get(&pdev->dev, "pxp-axi");
/* NOTE(review): clk_prepare_enable/devm_clk_get results are not
 * checked here — confirm against the full source. */
1585 clk_prepare_enable(pxp->clk);
/* Hardware init needs the clock on; gate it off again right after. */
1587 err = pxp_hw_init(pxp);
1588 clk_disable_unprepare(pxp->clk);
1590 dev_err(&pdev->dev, "failed to initialize hardware\n");
1594 err = devm_request_irq(&pdev->dev, pxp->irq, pxp_irq, 0,
1595 "pxp-dmaengine", pxp);
1598 /* Initialize DMA engine */
1599 err = pxp_dma_init(pxp);
1603 if (device_create_file(&pdev->dev, &dev_attr_clk_off_timeout)) {
1605 "Unable to create file from clk_off_timeout\n");
/* Deferred clock gating: timer fires after timeout_in_ms of idleness
 * and schedules 'work' to actually disable the clock. */
1610 INIT_WORK(&pxp->work, clkoff_callback);
1611 init_waitqueue_head(&pxp->done);
1612 init_timer(&pxp->clk_timer);
1613 pxp->clk_timer.function = pxp_clkoff_timer;
1614 pxp->clk_timer.data = (unsigned long)pxp;
/* Register the char-device interface used by user space. */
1616 register_pxp_device();
1620 dev_err(&pdev->dev, "Exiting (unsuccessfully) pxp_probe()\n");
/*
 * pxp_remove() - platform driver remove: undo probe in reverse order.
 * Unregisters the user-space device, flushes the clock-off work, kills
 * the idle timer, gates the clock and removes the sysfs attribute.
 * (devm-managed IRQ/iomap/memory are released automatically.)
 */
1624 static int pxp_remove(struct platform_device *pdev)
1626 struct pxps *pxp = platform_get_drvdata(pdev);
1628 unregister_pxp_device();
1629 cancel_work_sync(&pxp->work);
1630 del_timer_sync(&pxp->clk_timer);
1631 clk_disable_unprepare(pxp->clk);
1632 device_remove_file(&pdev->dev, &dev_attr_clk_off_timeout);
/*
 * pxp_suspend() - legacy PM suspend hook.
 * Busy-waits for the engine to finish the current operation, then
 * puts the block into soft reset and gates the clock.
 */
1638 static int pxp_suspend(struct platform_device *pdev, pm_message_t state)
1640 struct pxps *pxp = platform_get_drvdata(pdev);
1642 pxp_clk_enable(pxp);
/* Spin until the ENABLE bit clears (current transfer done). */
1643 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
1646 __raw_writel(BM_PXP_CTRL_SFTRST, pxp->base + HW_PXP_CTRL);
1647 pxp_clk_disable(pxp);
/*
 * pxp_resume() - legacy PM resume hook.
 * Clears the soft-reset set by pxp_suspend(), leaving the clock gated;
 * full re-configuration happens on the next transfer.
 */
1652 static int pxp_resume(struct platform_device *pdev)
1654 struct pxps *pxp = platform_get_drvdata(pdev);
1656 pxp_clk_enable(pxp);
1657 /* Pull PxP out of reset */
1658 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1659 pxp_clk_disable(pxp);
/* No-op PM hooks — presumably inside an #else of #ifdef CONFIG_PM
 * (guard not visible in this view; confirm against full source). */
1664 #define pxp_suspend NULL
1665 #define pxp_resume NULL
/* Platform driver glue: probe is set elsewhere in the initializer
 * (elided lines); DT matching via imx_pxpdma_dt_ids. */
1668 static struct platform_driver pxp_driver = {
1671 .of_match_table = of_match_ptr(imx_pxpdma_dt_ids),
1674 .remove = pxp_remove,
1675 .suspend = pxp_suspend,
1676 .resume = pxp_resume,
1679 module_platform_driver(pxp_driver);
1682 MODULE_DESCRIPTION("i.MX PxP driver");
1683 MODULE_AUTHOR("Freescale Semiconductor, Inc.");
1684 MODULE_LICENSE("GPL");