2 * Copyright (C) 2010-2013 Freescale Semiconductor, Inc.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License as published by
6 * the Free Software Foundation; either version 2 of the License, or
7 * (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write to the Free Software
16 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 * Based on STMP378X PxP driver
21 * Copyright 2008-2009 Embedded Alley Solutions, Inc All Rights Reserved.
24 #include <linux/dma-mapping.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mutex.h>
31 #include <linux/platform_device.h>
32 #include <linux/slab.h>
33 #include <linux/vmalloc.h>
34 #include <linux/dmaengine.h>
35 #include <linux/pxp_dma.h>
36 #include <linux/timer.h>
37 #include <linux/clk.h>
38 #include <linux/workqueue.h>
39 #include <linux/sched.h>
42 #include "regs-pxp_v2.h"
44 #define PXP_DOWNSCALE_THRESHOLD 0x4000
46 static LIST_HEAD(head);
47 static int timeout_in_ms = 600;
50 struct dma_device dma;
54 struct platform_device *pdev;
57 int irq; /* PXP IRQ to the CPU */
60 struct mutex clk_mutex;
62 #define CLK_STAT_OFF 0
68 struct pxp_dma pxp_dma;
69 struct pxp_channel channel[NR_PXP_VIRT_CHANNEL];
70 wait_queue_head_t done;
71 struct work_struct work;
73 /* describes most recent processing configuration */
74 struct pxp_config_data pxp_conf_state;
76 /* to turn clock off when pxp is inactive */
77 struct timer_list clk_timer;
80 #define to_pxp_dma(d) container_of(d, struct pxp_dma, dma)
81 #define to_tx_desc(tx) container_of(tx, struct pxp_tx_desc, txd)
82 #define to_pxp_channel(d) container_of(d, struct pxp_channel, dma_chan)
83 #define to_pxp(id) container_of(id, struct pxps, pxp_dma)
85 #define PXP_DEF_BUFS 2
88 #define PXP_WAITCON ((__raw_readl(pxp->base + HW_PXP_STAT) & \
89 BM_PXP_STAT_IRQ) != BM_PXP_STAT_IRQ)
91 static uint32_t pxp_s0_formats[] = {
100 * PXP common functions
102 static void dump_pxp_reg(struct pxps *pxp)
104 dev_dbg(pxp->dev, "PXP_CTRL 0x%x",
105 __raw_readl(pxp->base + HW_PXP_CTRL));
106 dev_dbg(pxp->dev, "PXP_STAT 0x%x",
107 __raw_readl(pxp->base + HW_PXP_STAT));
108 dev_dbg(pxp->dev, "PXP_OUT_CTRL 0x%x",
109 __raw_readl(pxp->base + HW_PXP_OUT_CTRL));
110 dev_dbg(pxp->dev, "PXP_OUT_BUF 0x%x",
111 __raw_readl(pxp->base + HW_PXP_OUT_BUF));
112 dev_dbg(pxp->dev, "PXP_OUT_BUF2 0x%x",
113 __raw_readl(pxp->base + HW_PXP_OUT_BUF2));
114 dev_dbg(pxp->dev, "PXP_OUT_PITCH 0x%x",
115 __raw_readl(pxp->base + HW_PXP_OUT_PITCH));
116 dev_dbg(pxp->dev, "PXP_OUT_LRC 0x%x",
117 __raw_readl(pxp->base + HW_PXP_OUT_LRC));
118 dev_dbg(pxp->dev, "PXP_OUT_PS_ULC 0x%x",
119 __raw_readl(pxp->base + HW_PXP_OUT_PS_ULC));
120 dev_dbg(pxp->dev, "PXP_OUT_PS_LRC 0x%x",
121 __raw_readl(pxp->base + HW_PXP_OUT_PS_LRC));
122 dev_dbg(pxp->dev, "PXP_OUT_AS_ULC 0x%x",
123 __raw_readl(pxp->base + HW_PXP_OUT_AS_ULC));
124 dev_dbg(pxp->dev, "PXP_OUT_AS_LRC 0x%x",
125 __raw_readl(pxp->base + HW_PXP_OUT_AS_LRC));
126 dev_dbg(pxp->dev, "PXP_PS_CTRL 0x%x",
127 __raw_readl(pxp->base + HW_PXP_PS_CTRL));
128 dev_dbg(pxp->dev, "PXP_PS_BUF 0x%x",
129 __raw_readl(pxp->base + HW_PXP_PS_BUF));
130 dev_dbg(pxp->dev, "PXP_PS_UBUF 0x%x",
131 __raw_readl(pxp->base + HW_PXP_PS_UBUF));
132 dev_dbg(pxp->dev, "PXP_PS_VBUF 0x%x",
133 __raw_readl(pxp->base + HW_PXP_PS_VBUF));
134 dev_dbg(pxp->dev, "PXP_PS_PITCH 0x%x",
135 __raw_readl(pxp->base + HW_PXP_PS_PITCH));
136 dev_dbg(pxp->dev, "PXP_PS_BACKGROUND 0x%x",
137 __raw_readl(pxp->base + HW_PXP_PS_BACKGROUND));
138 dev_dbg(pxp->dev, "PXP_PS_SCALE 0x%x",
139 __raw_readl(pxp->base + HW_PXP_PS_SCALE));
140 dev_dbg(pxp->dev, "PXP_PS_OFFSET 0x%x",
141 __raw_readl(pxp->base + HW_PXP_PS_OFFSET));
142 dev_dbg(pxp->dev, "PXP_PS_CLRKEYLOW 0x%x",
143 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYLOW));
144 dev_dbg(pxp->dev, "PXP_PS_CLRKEYHIGH 0x%x",
145 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYHIGH));
146 dev_dbg(pxp->dev, "PXP_AS_CTRL 0x%x",
147 __raw_readl(pxp->base + HW_PXP_AS_CTRL));
148 dev_dbg(pxp->dev, "PXP_AS_BUF 0x%x",
149 __raw_readl(pxp->base + HW_PXP_AS_BUF));
150 dev_dbg(pxp->dev, "PXP_AS_PITCH 0x%x",
151 __raw_readl(pxp->base + HW_PXP_AS_PITCH));
152 dev_dbg(pxp->dev, "PXP_AS_CLRKEYLOW 0x%x",
153 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYLOW));
154 dev_dbg(pxp->dev, "PXP_AS_CLRKEYHIGH 0x%x",
155 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYHIGH));
156 dev_dbg(pxp->dev, "PXP_CSC1_COEF0 0x%x",
157 __raw_readl(pxp->base + HW_PXP_CSC1_COEF0));
158 dev_dbg(pxp->dev, "PXP_CSC1_COEF1 0x%x",
159 __raw_readl(pxp->base + HW_PXP_CSC1_COEF1));
160 dev_dbg(pxp->dev, "PXP_CSC1_COEF2 0x%x",
161 __raw_readl(pxp->base + HW_PXP_CSC1_COEF2));
162 dev_dbg(pxp->dev, "PXP_CSC2_CTRL 0x%x",
163 __raw_readl(pxp->base + HW_PXP_CSC2_CTRL));
164 dev_dbg(pxp->dev, "PXP_CSC2_COEF0 0x%x",
165 __raw_readl(pxp->base + HW_PXP_CSC2_COEF0));
166 dev_dbg(pxp->dev, "PXP_CSC2_COEF1 0x%x",
167 __raw_readl(pxp->base + HW_PXP_CSC2_COEF1));
168 dev_dbg(pxp->dev, "PXP_CSC2_COEF2 0x%x",
169 __raw_readl(pxp->base + HW_PXP_CSC2_COEF2));
170 dev_dbg(pxp->dev, "PXP_CSC2_COEF3 0x%x",
171 __raw_readl(pxp->base + HW_PXP_CSC2_COEF3));
172 dev_dbg(pxp->dev, "PXP_CSC2_COEF4 0x%x",
173 __raw_readl(pxp->base + HW_PXP_CSC2_COEF4));
174 dev_dbg(pxp->dev, "PXP_CSC2_COEF5 0x%x",
175 __raw_readl(pxp->base + HW_PXP_CSC2_COEF5));
176 dev_dbg(pxp->dev, "PXP_LUT_CTRL 0x%x",
177 __raw_readl(pxp->base + HW_PXP_LUT_CTRL));
178 dev_dbg(pxp->dev, "PXP_LUT_ADDR 0x%x",
179 __raw_readl(pxp->base + HW_PXP_LUT_ADDR));
180 dev_dbg(pxp->dev, "PXP_LUT_DATA 0x%x",
181 __raw_readl(pxp->base + HW_PXP_LUT_DATA));
182 dev_dbg(pxp->dev, "PXP_LUT_EXTMEM 0x%x",
183 __raw_readl(pxp->base + HW_PXP_LUT_EXTMEM));
184 dev_dbg(pxp->dev, "PXP_CFA 0x%x",
185 __raw_readl(pxp->base + HW_PXP_CFA));
186 dev_dbg(pxp->dev, "PXP_HIST_CTRL 0x%x",
187 __raw_readl(pxp->base + HW_PXP_HIST_CTRL));
188 dev_dbg(pxp->dev, "PXP_HIST2_PARAM 0x%x",
189 __raw_readl(pxp->base + HW_PXP_HIST2_PARAM));
190 dev_dbg(pxp->dev, "PXP_HIST4_PARAM 0x%x",
191 __raw_readl(pxp->base + HW_PXP_HIST4_PARAM));
192 dev_dbg(pxp->dev, "PXP_HIST8_PARAM0 0x%x",
193 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM0));
194 dev_dbg(pxp->dev, "PXP_HIST8_PARAM1 0x%x",
195 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM1));
196 dev_dbg(pxp->dev, "PXP_HIST16_PARAM0 0x%x",
197 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM0));
198 dev_dbg(pxp->dev, "PXP_HIST16_PARAM1 0x%x",
199 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM1));
200 dev_dbg(pxp->dev, "PXP_HIST16_PARAM2 0x%x",
201 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM2));
202 dev_dbg(pxp->dev, "PXP_HIST16_PARAM3 0x%x",
203 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM3));
204 dev_dbg(pxp->dev, "PXP_POWER 0x%x",
205 __raw_readl(pxp->base + HW_PXP_POWER));
206 dev_dbg(pxp->dev, "PXP_NEXT 0x%x",
207 __raw_readl(pxp->base + HW_PXP_NEXT));
208 dev_dbg(pxp->dev, "PXP_DEBUGCTRL 0x%x",
209 __raw_readl(pxp->base + HW_PXP_DEBUGCTRL));
210 dev_dbg(pxp->dev, "PXP_DEBUG 0x%x",
211 __raw_readl(pxp->base + HW_PXP_DEBUG));
212 dev_dbg(pxp->dev, "PXP_VERSION 0x%x",
213 __raw_readl(pxp->base + HW_PXP_VERSION));
216 static bool is_yuv(u32 pix_fmt)
218 if ((pix_fmt == PXP_PIX_FMT_YUYV) |
219 (pix_fmt == PXP_PIX_FMT_UYVY) |
220 (pix_fmt == PXP_PIX_FMT_Y41P) |
221 (pix_fmt == PXP_PIX_FMT_YUV444) |
222 (pix_fmt == PXP_PIX_FMT_NV12) |
223 (pix_fmt == PXP_PIX_FMT_GREY) |
224 (pix_fmt == PXP_PIX_FMT_GY04) |
225 (pix_fmt == PXP_PIX_FMT_YVU410P) |
226 (pix_fmt == PXP_PIX_FMT_YUV410P) |
227 (pix_fmt == PXP_PIX_FMT_YVU420P) |
228 (pix_fmt == PXP_PIX_FMT_YUV420P) |
229 (pix_fmt == PXP_PIX_FMT_YUV420P2) |
230 (pix_fmt == PXP_PIX_FMT_YVU422P) |
231 (pix_fmt == PXP_PIX_FMT_YUV422P)) {
/*
 * pxp_set_ctrl() - program the PS input format, the OUT output format and
 * the global CTRL word (scaling/flip/rotate) from pxp_conf_state.
 */
238 static void pxp_set_ctrl(struct pxps *pxp)
240 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
241 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
245 /* Configure S0 input format */
246 switch (pxp_conf->s0_param.pixel_fmt) {
247 case PXP_PIX_FMT_RGB24:
248 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB888;
250 case PXP_PIX_FMT_RGB565:
251 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB565;
253 case PXP_PIX_FMT_RGB555:
254 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB555;
256 case PXP_PIX_FMT_YUV420P:
257 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV420;
259 case PXP_PIX_FMT_GREY:
260 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y8;
262 case PXP_PIX_FMT_GY04:
263 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y4;
265 case PXP_PIX_FMT_YUV422P:
266 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV422;
268 case PXP_PIX_FMT_UYVY:
269 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__UYVY1P422;
/* latch the selected PS (processed surface) input format */
275 ctrl = BF_PXP_PS_CTRL_FORMAT(fmt_ctrl);
276 __raw_writel(ctrl, pxp->base + HW_PXP_PS_CTRL);
278 /* Configure output format based on out_channel format */
279 switch (pxp_conf->out_param.pixel_fmt) {
280 case PXP_PIX_FMT_RGB24:
281 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB888;
283 case PXP_PIX_FMT_RGB565:
284 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB565;
286 case PXP_PIX_FMT_RGB555:
287 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB555;
289 case PXP_PIX_FMT_YUV420P:
290 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P420;
292 case PXP_PIX_FMT_YUV422P:
293 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P422;
295 case PXP_PIX_FMT_GREY:
296 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y8;
298 case PXP_PIX_FMT_GY04:
299 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y4;
/* latch the selected output format */
305 ctrl = BF_PXP_OUT_CTRL_FORMAT(fmt_ctrl);
306 __raw_writel(ctrl, pxp->base + HW_PXP_OUT_CTRL);
/* build the global control word from the processing flags */
309 if (proc_data->scaling)
311 if (proc_data->vflip)
312 ctrl |= BM_PXP_CTRL_VFLIP;
313 if (proc_data->hflip)
314 ctrl |= BM_PXP_CTRL_HFLIP;
315 if (proc_data->rotate)
/* rotate is in degrees; hardware field takes 0..3 (multiples of 90) */
316 ctrl |= BF_PXP_CTRL_ROTATE(proc_data->rotate / 90);
318 /* H/W support: controls where rotation will occur in the PXP datapath,
319 * will export an interfance if needed
321 /* ctrl |= BM_PXP_CTRL_ROT_POS; post rotation */
323 __raw_writel(ctrl, pxp->base + HW_PXP_CTRL);
/*
 * pxp_start() - kick off the programmed PxP operation: enable the
 * completion interrupt, then set the ENABLE bit (SET register aliases
 * avoid a read-modify-write).
 */
326 static int pxp_start(struct pxps *pxp)
328 __raw_writel(BM_PXP_CTRL_IRQ_ENABLE, pxp->base + HW_PXP_CTRL_SET);
329 __raw_writel(BM_PXP_CTRL_ENABLE, pxp->base + HW_PXP_CTRL_SET);
/*
 * pxp_set_outbuf() - program the output buffer address, size (LRC = lower
 * right corner, inclusive) and pitch from out_param.
 */
335 static void pxp_set_outbuf(struct pxps *pxp)
337 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
338 struct pxp_layer_param *out_params = &pxp_conf->out_param;
340 __raw_writel(out_params->paddr, pxp->base + HW_PXP_OUT_BUF);
342 __raw_writel(BF_PXP_OUT_LRC_X(out_params->width - 1) |
343 BF_PXP_OUT_LRC_Y(out_params->height - 1),
344 pxp->base + HW_PXP_OUT_LRC);
/*
 * Pitch in bytes: stride appears to be in pixels here; RGB24 is treated
 * as 4 bytes/pixel (32-bit storage, matching pxp_set_s0buf) and RGB565
 * as 2 bytes/pixel — TODO confirm against the stride units used by callers.
 */
346 if (out_params->pixel_fmt == PXP_PIX_FMT_RGB24)
347 __raw_writel(out_params->stride << 2,
348 pxp->base + HW_PXP_OUT_PITCH);
349 else if (out_params->pixel_fmt == PXP_PIX_FMT_RGB565)
350 __raw_writel(out_params->stride << 1,
351 pxp->base + HW_PXP_OUT_PITCH);
353 __raw_writel(out_params->stride, pxp->base + HW_PXP_OUT_PITCH);
/*
 * pxp_set_s0colorkey() - program the PS (S0) color key registers.
 * A color_key of -1 disables keying by setting LOW > HIGH so no pixel
 * can ever match.
 */
356 static void pxp_set_s0colorkey(struct pxps *pxp)
358 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
359 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
361 /* Low and high are set equal. V4L does not allow a chromakey range */
362 if (s0_params->color_key == -1) {
363 /* disable color key */
364 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_PS_CLRKEYLOW);
365 __raw_writel(0, pxp->base + HW_PXP_PS_CLRKEYHIGH);
367 __raw_writel(s0_params->color_key,
368 pxp->base + HW_PXP_PS_CLRKEYLOW);
369 __raw_writel(s0_params->color_key,
370 pxp->base + HW_PXP_PS_CLRKEYHIGH);
/*
 * pxp_set_olcolorkey() - program the AS (overlay) color key registers for
 * overlay @layer_no; keying is enabled only when the layer both requests
 * it and supplies a valid (!= -1) key.
 */
374 static void pxp_set_olcolorkey(int layer_no, struct pxps *pxp)
376 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
377 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[layer_no];
379 /* Low and high are set equal. V4L does not allow a chromakey range */
380 if (ol_params->color_key_enable != 0 && ol_params->color_key != -1) {
381 __raw_writel(ol_params->color_key,
382 pxp->base + HW_PXP_AS_CLRKEYLOW);
383 __raw_writel(ol_params->color_key,
384 pxp->base + HW_PXP_AS_CLRKEYHIGH);
/* LOW > HIGH makes the key range empty, i.e. keying off */
386 /* disable color key */
387 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_AS_CLRKEYLOW);
388 __raw_writel(0, pxp->base + HW_PXP_AS_CLRKEYHIGH);
/*
 * pxp_set_oln() - program the overlay (AS) buffer address, its placement
 * in the output frame, and its pitch.
 */
392 static void pxp_set_oln(int layer_no, struct pxps *pxp)
394 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
395 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
396 dma_addr_t phys_addr = olparams_data->paddr;
397 __raw_writel(phys_addr, pxp->base + HW_PXP_AS_BUF);
/* overlay is anchored at the output's upper-left corner */
400 __raw_writel(0x0, pxp->base + HW_PXP_OUT_AS_ULC);
401 __raw_writel(BF_PXP_OUT_AS_LRC_X(olparams_data->width) |
402 BF_PXP_OUT_AS_LRC_Y(olparams_data->height),
403 pxp->base + HW_PXP_OUT_AS_LRC);
/*
 * Pitch in bytes: RGB24 as 4 bytes/pixel, everything else assumed
 * 2 bytes/pixel (16-bit RGB) — NOTE(review): confirm no other overlay
 * formats reach this path.
 */
405 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
406 __raw_writel(olparams_data->width << 2,
407 pxp->base + HW_PXP_AS_PITCH);
409 __raw_writel(olparams_data->width << 1,
410 pxp->base + HW_PXP_AS_PITCH);
/*
 * pxp_set_olparam() - build and write the AS_CTRL word for overlay
 * @layer_no: global alpha, pixel format, alpha override mode and
 * color-key enable.
 */
413 static void pxp_set_olparam(int layer_no, struct pxps *pxp)
415 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
416 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
419 olparam = BF_PXP_AS_CTRL_ALPHA(olparams_data->global_alpha);
/* only RGB888 and RGB565 overlay formats are distinguished here */
420 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
422 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB888);
425 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB565);
/* Override = use the global alpha instead of per-pixel alpha */
426 if (olparams_data->global_alpha_enable)
428 BF_PXP_AS_CTRL_ALPHA_CTRL
429 (BV_PXP_AS_CTRL_ALPHA_CTRL__Override)
430 if (olparams_data->color_key_enable)
431 olparam |= BM_PXP_AS_CTRL_ENABLE_COLORKEY;
432 if (olparams_data->combine_enable)
434 __raw_writel(olparam, pxp->base + HW_PXP_AS_CTRL);
/*
 * pxp_set_s0param() - program where the processed surface (PS) lands in
 * the output buffer: upper-left corner from drect.left/top, lower-right
 * corner inclusive (hence the "- 1").
 */
437 static void pxp_set_s0param(struct pxps *pxp)
439 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
440 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
443 /* contains the coordinate for the PS in the OUTPUT buffer. */
444 s0param = BF_PXP_OUT_PS_ULC_X(proc_data->drect.left);
445 s0param |= BF_PXP_OUT_PS_ULC_Y(proc_data->drect.top);
446 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_ULC);
447 s0param = BF_PXP_OUT_PS_LRC_X(proc_data->drect.left +
448 proc_data->drect.width - 1);
449 s0param |= BF_PXP_OUT_PS_LRC_Y(proc_data->drect.top +
450 proc_data->drect.height - 1);
451 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_LRC);
454 /* crop behavior is re-designed in h/w. */
/*
 * pxp_set_s0crop() - intentionally empty: cropping is handled by the
 * srect offsets applied in pxp_set_s0buf()/pxp_set_s0param().
 */
455 static void pxp_set_s0crop(struct pxps *pxp)
458 * place-holder, it's implemented in other functions in this driver.
459 * Refer to "Clipping source images" section in RM for detail.
/*
 * pxp_set_scaling() - program PS_SCALE from the source/destination rects.
 * Scale factors are 4.12 fixed point (0x1000 == 1.0); values are clamped
 * to PXP_DOWNSCALE_THRESHOLD (max 1/4 downscale per pass).
 * Side effect: sets proc_data->scaling for pxp_set_ctrl().
 */
463 static int pxp_set_scaling(struct pxps *pxp)
466 u32 xscale, yscale, s0scale;
467 struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
/* identity: write 1.0/1.0 and skip the scaler */
469 if ((proc_data->srect.width == proc_data->drect.width) &&
470 (proc_data->srect.height == proc_data->drect.height)) {
471 proc_data->scaling = 0;
472 __raw_writel(0x10001000, pxp->base + HW_PXP_PS_SCALE);
476 proc_data->scaling = 1;
/* ratio = src/dst in 4.12 fixed point */
477 xscale = proc_data->srect.width * 0x1000 / proc_data->drect.width;
478 yscale = proc_data->srect.height * 0x1000 / proc_data->drect.height;
479 if (xscale > PXP_DOWNSCALE_THRESHOLD)
480 xscale = PXP_DOWNSCALE_THRESHOLD;
481 if (yscale > PXP_DOWNSCALE_THRESHOLD)
482 yscale = PXP_DOWNSCALE_THRESHOLD;
483 s0scale = BF_PXP_PS_SCALE_YSCALE(yscale) |
484 BF_PXP_PS_SCALE_XSCALE(xscale);
485 __raw_writel(s0scale, pxp->base + HW_PXP_PS_SCALE);
/*
 * pxp_set_bg() - program the background color shown where the PS surface
 * does not cover the output.
 */
493 static void pxp_set_bg(struct pxps *pxp)
495 __raw_writel(pxp->pxp_conf_state.proc_data.bgcolor,
496 pxp->base + HW_PXP_PS_BACKGROUND);
499 static void pxp_set_lut(struct pxps *pxp)
501 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
502 int lut_op = pxp_conf->proc_data.lut_transform;
505 bool use_cmap = (lut_op & PXP_LUT_USE_CMAP) ? true : false;
506 u8 *cmap = pxp_conf->proc_data.lut_map;
512 * If LUT already configured as needed, return...
513 * Unless CMAP is needed and it has been updated.
515 if ((pxp->lut_state == lut_op) &&
516 !(use_cmap && pxp_conf->proc_data.lut_map_updated))
519 if (lut_op == PXP_LUT_NONE) {
520 __raw_writel(BM_PXP_LUT_CTRL_BYPASS,
521 pxp->base + HW_PXP_LUT_CTRL);
522 } else if (((lut_op & PXP_LUT_INVERT) != 0)
523 && ((lut_op & PXP_LUT_BLACK_WHITE) != 0)) {
524 /* Fill out LUT table with inverted monochromized values */
526 /* clear bypass bit, set lookup mode & out mode */
527 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
528 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
529 BF_PXP_LUT_CTRL_OUT_MODE
530 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
531 pxp->base + HW_PXP_LUT_CTRL);
533 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
534 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
536 /* LUT address pointer auto-increments after each data write */
537 for (pix_val = 0; pix_val < 256; pix_val += 4) {
538 for (i = 0; i < 4; i++) {
539 entry_src = use_cmap ?
540 cmap[pix_val + i] : pix_val + i;
541 entry[i] = (entry_src < 0x80) ? 0xFF : 0x00;
543 reg_val = (entry[3] << 24) | (entry[2] << 16) |
544 (entry[1] << 8) | entry[0];
545 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
547 } else if ((lut_op & PXP_LUT_INVERT) != 0) {
548 /* Fill out LUT table with 8-bit inverted values */
550 /* clear bypass bit, set lookup mode & out mode */
551 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
552 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
553 BF_PXP_LUT_CTRL_OUT_MODE
554 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
555 pxp->base + HW_PXP_LUT_CTRL);
557 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
558 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
560 /* LUT address pointer auto-increments after each data write */
561 for (pix_val = 0; pix_val < 256; pix_val += 4) {
562 for (i = 0; i < 4; i++) {
563 entry_src = use_cmap ?
564 cmap[pix_val + i] : pix_val + i;
565 entry[i] = ~entry_src & 0xFF;
567 reg_val = (entry[3] << 24) | (entry[2] << 16) |
568 (entry[1] << 8) | entry[0];
569 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
571 } else if ((lut_op & PXP_LUT_BLACK_WHITE) != 0) {
572 /* Fill out LUT table with 8-bit monochromized values */
574 /* clear bypass bit, set lookup mode & out mode */
575 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
576 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
577 BF_PXP_LUT_CTRL_OUT_MODE
578 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
579 pxp->base + HW_PXP_LUT_CTRL);
581 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
582 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
584 /* LUT address pointer auto-increments after each data write */
585 for (pix_val = 0; pix_val < 256; pix_val += 4) {
586 for (i = 0; i < 4; i++) {
587 entry_src = use_cmap ?
588 cmap[pix_val + i] : pix_val + i;
589 entry[i] = (entry_src < 0x80) ? 0x00 : 0xFF;
591 reg_val = (entry[3] << 24) | (entry[2] << 16) |
592 (entry[1] << 8) | entry[0];
593 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
595 } else if (use_cmap) {
596 /* Fill out LUT table using colormap values */
598 /* clear bypass bit, set lookup mode & out mode */
599 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
600 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
601 BF_PXP_LUT_CTRL_OUT_MODE
602 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
603 pxp->base + HW_PXP_LUT_CTRL);
605 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
606 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
608 /* LUT address pointer auto-increments after each data write */
609 for (pix_val = 0; pix_val < 256; pix_val += 4) {
610 for (i = 0; i < 4; i++)
611 entry[i] = cmap[pix_val + i];
612 reg_val = (entry[3] << 24) | (entry[2] << 16) |
613 (entry[1] << 8) | entry[0];
614 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
618 pxp->lut_state = lut_op;
/*
 * pxp_set_csc() - configure the two color-space converters (CSC1 on the
 * PS input path, CSC2 on the output path) based on whether S0 input and
 * output are YUV or RGB.  Coefficient constants are hardware fixed-point
 * values — presumably BT.601-style matrices; confirm against the RM.
 */
621 static void pxp_set_csc(struct pxps *pxp)
623 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
624 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
625 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[0];
626 struct pxp_layer_param *out_params = &pxp_conf->out_param;
628 bool input_is_YUV = is_yuv(s0_params->pixel_fmt);
629 bool output_is_YUV = is_yuv(out_params->pixel_fmt);
631 if (input_is_YUV && output_is_YUV) {
633 * Input = YUV, Output = YUV
634 * No CSC unless we need to do combining
636 if (ol_params->combine_enable) {
637 /* Must convert to RGB for combining with RGB overlay */
639 /* CSC1 - YUV->RGB */
640 __raw_writel(0x04030000, pxp->base + HW_PXP_CSC1_COEF0);
641 __raw_writel(0x01230208, pxp->base + HW_PXP_CSC1_COEF1);
642 __raw_writel(0x076b079c, pxp->base + HW_PXP_CSC1_COEF2);
644 /* CSC2 - RGB->YUV */
645 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
646 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
647 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
648 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
649 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
650 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
651 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
653 /* Input & Output both YUV, so bypass both CSCs */
/* CSC1 bypass value; CSC2 CTRL bit0 = bypass */
656 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
659 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
661 } else if (input_is_YUV && !output_is_YUV) {
663 * Input = YUV, Output = RGB
664 * Use CSC1 to convert to RGB
667 /* CSC1 - YUV->RGB */
668 __raw_writel(0x84ab01f0, pxp->base + HW_PXP_CSC1_COEF0);
669 __raw_writel(0x01980204, pxp->base + HW_PXP_CSC1_COEF1);
670 __raw_writel(0x0730079c, pxp->base + HW_PXP_CSC1_COEF2);
/* bypass CSC2 */
673 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
674 } else if (!input_is_YUV && output_is_YUV) {
676 * Input = RGB, Output = YUV
677 * Use CSC2 to convert to YUV
/* bypass CSC1 */
681 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
683 /* CSC2 - RGB->YUV */
684 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
685 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
686 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
687 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
688 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
689 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
690 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
693 * Input = RGB, Output = RGB
694 * Input & Output both RGB, so bypass both CSCs
698 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
701 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
704 /* YCrCb colorspace */
705 /* Not sure when we use this...no YCrCb formats are defined for PxP */
707 __raw_writel(0x84ab01f0, HW_PXP_CSCCOEFF0_ADDR);
708 __raw_writel(0x01230204, HW_PXP_CSCCOEFF1_ADDR);
709 __raw_writel(0x0730079c, HW_PXP_CSCCOEFF2_ADDR);
/*
 * pxp_set_s0buf() - program the S0 (PS) source buffer addresses and pitch,
 * applying the srect crop offset.  For planar YUV 4:2:0 / grey formats the
 * U and V plane addresses are derived from the Y plane.
 */
714 static void pxp_set_s0buf(struct pxps *pxp)
716 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
717 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
718 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
720 dma_addr_t Y1, U1, V1;
723 Y = s0_params->paddr;
/* bytes-per-pixel for the crop offset: RGB565 -> 2, RGB24 -> 4 */
725 if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB565)
727 else if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB24)
729 offset = (proc_data->srect.top * s0_params->width +
730 proc_data->srect.left) * bpp;
731 /* clipping or cropping */
733 __raw_writel(Y1, pxp->base + HW_PXP_PS_BUF);
734 if ((s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P) ||
735 (s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P) ||
736 (s0_params->pixel_fmt == PXP_PIX_FMT_GREY)) {
737 /* Set to 1 if YUV format is 4:2:2 rather than 4:2:0 */
/* chroma planes are quarter-size for 4:2:0; offset scaled accordingly */
740 offset = proc_data->srect.top * s0_params->width / 4 +
741 proc_data->srect.left / 2;
742 U = Y + (s0_params->width * s0_params->height);
744 V = U + ((s0_params->width * s0_params->height) >> s);
746 __raw_writel(U1, pxp->base + HW_PXP_PS_UBUF);
747 __raw_writel(V1, pxp->base + HW_PXP_PS_VBUF);
750 /* TODO: only support RGB565, Y8, Y4, YUV420 */
751 if (s0_params->pixel_fmt == PXP_PIX_FMT_GREY ||
752 s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P)
753 __raw_writel(s0_params->width, pxp->base + HW_PXP_PS_PITCH);
754 else if (s0_params->pixel_fmt == PXP_PIX_FMT_GY04)
/* Y4 packs two pixels per byte */
755 __raw_writel(s0_params->width >> 1,
756 pxp->base + HW_PXP_PS_PITCH);
758 __raw_writel(s0_params->width * 2, pxp->base + HW_PXP_PS_PITCH);
762 * pxp_config() - configure PxP for a processing task
763 * @pxps: PXP context.
764 * @pxp_chan: PXP channel.
765 * @return: 0 on success or negative error code on failure.
767 static int pxp_config(struct pxps *pxp, struct pxp_channel *pxp_chan)
769 struct pxp_config_data *pxp_conf_data = &pxp->pxp_conf_state;
773 /* Configure PxP regs */
775 pxp_set_s0param(pxp);
777 pxp_set_scaling(pxp);
/* layer_nr counts S0 + OUT + overlays; overlays = layer_nr - 2 */
778 ol_nr = pxp_conf_data->layer_nr - 2;
780 i = pxp_conf_data->layer_nr - 2 - ol_nr;
782 pxp_set_olparam(i, pxp);
783 /* only the color key in higher overlay will take effect. */
784 pxp_set_olcolorkey(i, pxp);
787 pxp_set_s0colorkey(pxp);
/*
 * pxp_clk_enable() - turn the PxP clock on if it is not already on.
 * clk_mutex serializes against pxp_clk_disable() and guards clk_stat.
 */
798 static void pxp_clk_enable(struct pxps *pxp)
800 mutex_lock(&pxp->clk_mutex);
/* already on: nothing to do */
802 if (pxp->clk_stat == CLK_STAT_ON) {
803 mutex_unlock(&pxp->clk_mutex);
807 clk_prepare_enable(pxp->clk);
808 pxp->clk_stat = CLK_STAT_ON;
810 mutex_unlock(&pxp->clk_mutex);
/*
 * pxp_clk_disable() - turn the PxP clock off, but only when no operation
 * is in flight (pxp_ongoing) and no channel is queued (head list empty).
 * Takes clk_mutex for clk_stat and the spinlock for the busy check.
 */
813 static void pxp_clk_disable(struct pxps *pxp)
817 mutex_lock(&pxp->clk_mutex);
819 if (pxp->clk_stat == CLK_STAT_OFF) {
820 mutex_unlock(&pxp->clk_mutex);
824 spin_lock_irqsave(&pxp->lock, flags);
825 if ((pxp->pxp_ongoing == 0) && list_empty(&head)) {
/* drop the spinlock before the (possibly sleeping) clk call */
826 spin_unlock_irqrestore(&pxp->lock, flags);
827 clk_disable_unprepare(pxp->clk);
828 pxp->clk_stat = CLK_STAT_OFF;
830 spin_unlock_irqrestore(&pxp->lock, flags);
832 mutex_unlock(&pxp->clk_mutex);
/*
 * clkoff_callback() - workqueue handler that disables the PxP clock;
 * scheduled from timer context (pxp_clkoff_timer) because the clk API
 * may sleep.
 */
835 static inline void clkoff_callback(struct work_struct *w)
837 struct pxps *pxp = container_of(w, struct pxps, work);
839 pxp_clk_disable(pxp);
/*
 * pxp_clkoff_timer() - inactivity timer: if the PxP is idle, queue the
 * clock-off work; otherwise re-arm the timer for another timeout period.
 */
842 static void pxp_clkoff_timer(unsigned long arg)
844 struct pxps *pxp = (struct pxps *)arg;
846 if ((pxp->pxp_ongoing == 0) && list_empty(&head))
847 schedule_work(&pxp->work);
849 mod_timer(&pxp->clk_timer,
850 jiffies + msecs_to_jiffies(timeout_in_ms));
853 static struct pxp_tx_desc *pxpdma_first_active(struct pxp_channel *pxp_chan)
855 return list_entry(pxp_chan->active_list.next, struct pxp_tx_desc, list);
858 static struct pxp_tx_desc *pxpdma_first_queued(struct pxp_channel *pxp_chan)
860 return list_entry(pxp_chan->queue.next, struct pxp_tx_desc, list);
863 /* called with pxp_chan->lock held */
/*
 * __pxpdma_dostart() - snapshot the channel's first active transaction
 * into pxp->pxp_conf_state: S0 layer and proc data from the head
 * descriptor, then output (first child) and overlay layers (remaining
 * children) from the descriptor's tx_list.
 */
864 static void __pxpdma_dostart(struct pxp_channel *pxp_chan)
866 struct pxp_dma *pxp_dma = to_pxp_dma(pxp_chan->dma_chan.device);
867 struct pxps *pxp = to_pxp(pxp_dma);
868 struct pxp_tx_desc *desc;
869 struct pxp_tx_desc *child;
872 /* so far we presume only one transaction on active_list */
874 desc = pxpdma_first_active(pxp_chan);
875 memcpy(&pxp->pxp_conf_state.s0_param,
876 &desc->layer_param.s0_param, sizeof(struct pxp_layer_param));
877 memcpy(&pxp->pxp_conf_state.proc_data,
878 &desc->proc_data, sizeof(struct pxp_proc_data));
880 /* Save PxP configuration */
881 list_for_each_entry(child, &desc->tx_list, list) {
882 if (i == 0) { /* Output */
883 memcpy(&pxp->pxp_conf_state.out_param,
884 &child->layer_param.out_param,
885 sizeof(struct pxp_layer_param));
886 } else { /* Overlay */
887 memcpy(&pxp->pxp_conf_state.ol_param[i - 1],
888 &child->layer_param.ol_param,
889 sizeof(struct pxp_layer_param));
894 pr_debug("%s:%d S0 w/h %d/%d paddr %08x\n", __func__, __LINE__,
895 pxp->pxp_conf_state.s0_param.width,
896 pxp->pxp_conf_state.s0_param.height,
897 pxp->pxp_conf_state.s0_param.paddr);
898 pr_debug("%s:%d OUT w/h %d/%d paddr %08x\n", __func__, __LINE__,
899 pxp->pxp_conf_state.out_param.width,
900 pxp->pxp_conf_state.out_param.height,
901 pxp->pxp_conf_state.out_param.paddr);
/*
 * pxpdma_dostart_work() - start the next queued PxP task: wait for the
 * hardware to go idle, pick the first waiting channel off the global
 * head list, load its configuration and program the registers.
 */
904 static void pxpdma_dostart_work(struct pxps *pxp)
906 struct pxp_channel *pxp_chan = NULL;
907 unsigned long flags, flags1;
/* busy-wait until the previous operation's ENABLE bit clears */
909 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
912 spin_lock_irqsave(&pxp->lock, flags);
913 if (list_empty(&head)) {
914 pxp->pxp_ongoing = 0;
915 spin_unlock_irqrestore(&pxp->lock, flags);
/* first waiting channel on the global task list */
919 pxp_chan = list_entry(head.next, struct pxp_channel, list);
921 spin_lock_irqsave(&pxp_chan->lock, flags1);
922 if (!list_empty(&pxp_chan->active_list)) {
923 struct pxp_tx_desc *desc;
925 desc = pxpdma_first_active(pxp_chan);
926 __pxpdma_dostart(pxp_chan);
928 spin_unlock_irqrestore(&pxp_chan->lock, flags1);
931 pxp_config(pxp, pxp_chan);
935 spin_unlock_irqrestore(&pxp->lock, flags);
/*
 * pxpdma_dequeue() - move every descriptor from the channel's submit
 * queue onto @list (caller holds pxp_chan->lock).
 */
938 static void pxpdma_dequeue(struct pxp_channel *pxp_chan, struct list_head *list)
940 struct pxp_tx_desc *desc = NULL;
942 desc = pxpdma_first_queued(pxp_chan);
943 list_move_tail(&desc->list, list);
944 } while (!list_empty(&pxp_chan->queue));
/*
 * pxp_tx_submit() - dmaengine tx_submit hook: assign the next cookie and
 * append the descriptor to the channel's submit queue.  Returns the
 * cookie handed back to the dmaengine client.
 */
947 static dma_cookie_t pxp_tx_submit(struct dma_async_tx_descriptor *tx)
949 struct pxp_tx_desc *desc = to_tx_desc(tx);
950 struct pxp_channel *pxp_chan = to_pxp_channel(tx->chan);
954 dev_dbg(&pxp_chan->dma_chan.dev->device, "received TX\n");
956 mutex_lock(&pxp_chan->chan_mutex);
958 cookie = pxp_chan->dma_chan.cookie;
963 /* from dmaengine.h: "last cookie value returned to client" */
964 pxp_chan->dma_chan.cookie = cookie;
967 /* pxp_chan->lock can be taken under ichan->lock, but not v.v. */
968 spin_lock_irqsave(&pxp_chan->lock, flags);
970 /* Here we add the tx descriptor to our PxP task queue. */
971 list_add_tail(&desc->list, &pxp_chan->queue);
973 spin_unlock_irqrestore(&pxp_chan->lock, flags);
975 dev_dbg(&pxp_chan->dma_chan.dev->device, "done TX\n");
977 mutex_unlock(&pxp_chan->chan_mutex);
981 /* Called with pxp_chan->chan_mutex held */
/*
 * pxp_desc_alloc() - allocate @n transfer descriptors for the channel in
 * one vmalloc block, initialize each and park them on the free list.
 * NOTE(review): n * sizeof(...) is unchecked for overflow — acceptable
 * only while n stays the small fixed value used by pxp_init_channel.
 */
982 static int pxp_desc_alloc(struct pxp_channel *pxp_chan, int n)
984 struct pxp_tx_desc *desc = vmalloc(n * sizeof(struct pxp_tx_desc));
989 pxp_chan->n_tx_desc = n;
990 pxp_chan->desc = desc;
991 INIT_LIST_HEAD(&pxp_chan->active_list);
992 INIT_LIST_HEAD(&pxp_chan->queue);
993 INIT_LIST_HEAD(&pxp_chan->free_list);
/* per-descriptor setup: zero the dmaengine part, hook our submit */
996 struct dma_async_tx_descriptor *txd = &desc->txd;
998 memset(txd, 0, sizeof(*txd));
999 INIT_LIST_HEAD(&desc->tx_list);
1000 dma_async_tx_descriptor_init(txd, &pxp_chan->dma_chan);
1001 txd->tx_submit = pxp_tx_submit;
1003 list_add(&desc->list, &pxp_chan->free_list);
1012 * pxp_init_channel() - initialize a PXP channel.
1013 * @pxp_dma: PXP DMA context.
1014 * @pchan: pointer to the channel object.
1015 * @return 0 on success or negative error code on failure.
1017 static int pxp_init_channel(struct pxp_dma *pxp_dma,
1018 struct pxp_channel *pxp_chan)
1020 unsigned long flags;
1021 struct pxps *pxp = to_pxp(pxp_dma);
1022 int ret = 0, n_desc = 0;
1025 * We are using _virtual_ channel here.
1026 * Each channel contains all parameters of corresponding layers
1027 * for one transaction; each layer is represented as one descriptor
1028 * (i.e., pxp_tx_desc) here.
1031 spin_lock_irqsave(&pxp->lock, flags);
1033 /* max desc nr: S0+OL+OUT = 1+8+1 */
1036 spin_unlock_irqrestore(&pxp->lock, flags);
/* allocate descriptors only once per channel */
1038 if (n_desc && !pxp_chan->desc)
1039 ret = pxp_desc_alloc(pxp_chan, n_desc);
1045 * pxp_uninit_channel() - uninitialize a PXP channel.
1046 * @pxp_dma: PXP DMA context.
1047 * @pchan: pointer to the channel object.
1048 * @return 0 on success or negative error code on failure.
1050 static int pxp_uninit_channel(struct pxp_dma *pxp_dma,
1051 struct pxp_channel *pxp_chan)
/* free the single vmalloc'd descriptor block from pxp_desc_alloc() */
1056 vfree(pxp_chan->desc);
1058 pxp_chan->desc = NULL;
/*
 * pxp_irq() - PxP completion interrupt handler.
 * Reads the histogram status, acknowledges the IRQ, completes the head
 * channel's active descriptor (cookie, callback, free-list recycling),
 * wakes waiters and re-arms the clock-off timer.
 */
1063 static irqreturn_t pxp_irq(int irq, void *dev_id)
1065 struct pxps *pxp = dev_id;
1066 struct pxp_channel *pxp_chan;
1067 struct pxp_tx_desc *desc;
1068 dma_async_tx_callback callback;
1069 void *callback_param;
1070 unsigned long flags;
/* capture histogram result before acking the interrupt */
1076 __raw_readl(pxp->base + HW_PXP_HIST_CTRL) & BM_PXP_HIST_CTRL_STATUS;
1078 __raw_writel(BM_PXP_STAT_IRQ, pxp->base + HW_PXP_STAT_CLR);
1080 spin_lock_irqsave(&pxp->lock, flags);
/* spurious IRQ or raced completion: no channel waiting */
1082 if (list_empty(&head)) {
1083 pxp->pxp_ongoing = 0;
1084 spin_unlock_irqrestore(&pxp->lock, flags);
1088 pxp_chan = list_entry(head.next, struct pxp_channel, list);
1089 list_del_init(&pxp_chan->list);
1091 if (list_empty(&pxp_chan->active_list)) {
1092 pr_debug("PXP_IRQ pxp_chan->active_list empty. chan_id %d\n",
1093 pxp_chan->dma_chan.chan_id);
1094 pxp->pxp_ongoing = 0;
1095 spin_unlock_irqrestore(&pxp->lock, flags);
1099 /* Get descriptor and call callback */
1100 desc = pxpdma_first_active(pxp_chan);
1102 pxp_chan->completed = desc->txd.cookie;
1104 callback = desc->txd.callback;
1105 callback_param = desc->txd.callback_param;
1107 /* Send histogram status back to caller */
1108 desc->hist_status = hist_status;
/* client callback only if it asked for completion interrupts */
1110 if ((desc->txd.flags & DMA_PREP_INTERRUPT) && callback)
1111 callback(callback_param);
1113 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/* recycle the descriptor and all its children onto the free list */
1115 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1116 list_move(&desc->list, &pxp_chan->free_list);
1118 wake_up(&pxp->done);
1119 pxp->pxp_ongoing = 0;
1120 mod_timer(&pxp->clk_timer, jiffies + msecs_to_jiffies(timeout_in_ms));
1122 spin_unlock_irqrestore(&pxp->lock, flags);
1127 /* called with pxp_chan->lock held */
1128 static struct pxp_tx_desc *pxpdma_desc_get(struct pxp_channel *pxp_chan)
1130 struct pxp_tx_desc *desc, *_desc;
1131 struct pxp_tx_desc *ret = NULL;
1133 list_for_each_entry_safe(desc, _desc, &pxp_chan->free_list, list) {
1134 list_del_init(&desc->list);
1142 /* called with pxp_chan->lock held */
1143 static void pxpdma_desc_put(struct pxp_channel *pxp_chan,
1144 struct pxp_tx_desc *desc)
1147 struct device *dev = &pxp_chan->dma_chan.dev->device;
1148 struct pxp_tx_desc *child;
1150 list_for_each_entry(child, &desc->tx_list, list)
1151 dev_info(dev, "moving child desc %p to freelist\n", child);
1152 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1153 dev_info(dev, "moving desc %p to freelist\n", desc);
1154 list_add(&desc->list, &pxp_chan->free_list);
1158 /* Allocate and initialise a transfer descriptor. */
1159 static struct dma_async_tx_descriptor *pxp_prep_slave_sg(struct dma_chan *chan,
1162 unsigned int sg_len,
1164 dma_transfer_direction
1166 unsigned long tx_flags,
1169 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1170 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1171 struct pxps *pxp = to_pxp(pxp_dma);
1172 struct pxp_tx_desc *desc = NULL;
1173 struct pxp_tx_desc *first = NULL, *prev = NULL;
1174 struct scatterlist *sg;
1175 unsigned long flags;
1176 dma_addr_t phys_addr;
1179 if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV) {
1180 dev_err(chan->device->dev, "Invalid DMA direction %d!\n",
1185 if (unlikely(sg_len < 2))
1188 spin_lock_irqsave(&pxp_chan->lock, flags);
1189 for_each_sg(sgl, sg, sg_len, i) {
1190 desc = pxpdma_desc_get(pxp_chan);
1192 pxpdma_desc_put(pxp_chan, first);
1193 dev_err(chan->device->dev, "Can't get DMA desc.\n");
1194 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1198 phys_addr = sg_dma_address(sg);
1203 desc->layer_param.s0_param.paddr = phys_addr;
1205 list_add_tail(&desc->list, &first->tx_list);
1210 desc->layer_param.out_param.paddr = phys_addr;
1212 desc->layer_param.ol_param.paddr = phys_addr;
1217 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1219 pxp->pxp_conf_state.layer_nr = sg_len;
1220 first->txd.flags = tx_flags;
1221 first->len = sg_len;
1222 pr_debug("%s:%d first %p, first->len %d, flags %08x\n",
1223 __func__, __LINE__, first, first->len, first->txd.flags);
1228 static void pxp_issue_pending(struct dma_chan *chan)
1230 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1231 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1232 struct pxps *pxp = to_pxp(pxp_dma);
1233 unsigned long flags0, flags;
1235 spin_lock_irqsave(&pxp->lock, flags0);
1236 spin_lock_irqsave(&pxp_chan->lock, flags);
1238 if (!list_empty(&pxp_chan->queue)) {
1239 pxpdma_dequeue(pxp_chan, &pxp_chan->active_list);
1240 pxp_chan->status = PXP_CHANNEL_READY;
1241 list_add_tail(&pxp_chan->list, &head);
1243 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1244 spin_unlock_irqrestore(&pxp->lock, flags0);
1247 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1248 spin_unlock_irqrestore(&pxp->lock, flags0);
1250 pxp_clk_enable(pxp);
1251 if (!wait_event_interruptible_timeout(pxp->done, PXP_WAITCON, 2 * HZ) ||
1252 signal_pending(current)) {
1253 pxp_clk_disable(pxp);
1257 spin_lock_irqsave(&pxp->lock, flags);
1258 pxp->pxp_ongoing = 1;
1259 spin_unlock_irqrestore(&pxp->lock, flags);
1260 pxpdma_dostart_work(pxp);
1263 static void __pxp_terminate_all(struct dma_chan *chan)
1265 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1266 unsigned long flags;
1268 /* pchan->queue is modified in ISR, have to spinlock */
1269 spin_lock_irqsave(&pxp_chan->lock, flags);
1270 list_splice_init(&pxp_chan->queue, &pxp_chan->free_list);
1271 list_splice_init(&pxp_chan->active_list, &pxp_chan->free_list);
1273 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1275 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1278 static int pxp_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
1281 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1283 /* Only supports DMA_TERMINATE_ALL */
1284 if (cmd != DMA_TERMINATE_ALL)
1287 mutex_lock(&pxp_chan->chan_mutex);
1288 __pxp_terminate_all(chan);
1289 mutex_unlock(&pxp_chan->chan_mutex);
1294 static int pxp_alloc_chan_resources(struct dma_chan *chan)
1296 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1297 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1300 /* dmaengine.c now guarantees to only offer free channels */
1301 BUG_ON(chan->client_count > 1);
1302 WARN_ON(pxp_chan->status != PXP_CHANNEL_FREE);
1305 pxp_chan->completed = -ENXIO;
1307 pr_debug("%s dma_chan.chan_id %d\n", __func__, chan->chan_id);
1308 ret = pxp_init_channel(pxp_dma, pxp_chan);
1312 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1314 dev_dbg(&chan->dev->device, "Found channel 0x%x, irq %d\n",
1315 chan->chan_id, pxp_chan->eof_irq);
1323 static void pxp_free_chan_resources(struct dma_chan *chan)
1325 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1326 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1328 mutex_lock(&pxp_chan->chan_mutex);
1330 __pxp_terminate_all(chan);
1332 pxp_chan->status = PXP_CHANNEL_FREE;
1334 pxp_uninit_channel(pxp_dma, pxp_chan);
1336 mutex_unlock(&pxp_chan->chan_mutex);
1339 static enum dma_status pxp_tx_status(struct dma_chan *chan,
1340 dma_cookie_t cookie,
1341 struct dma_tx_state *txstate)
1343 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1345 if (cookie != chan->cookie)
1349 txstate->last = pxp_chan->completed;
1350 txstate->used = chan->cookie;
1351 txstate->residue = 0;
1356 static int pxp_hw_init(struct pxps *pxp)
1358 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
1359 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
1362 /* Pull PxP out of reset */
1363 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1365 /* Config defaults */
1367 /* Initialize non-channel-specific PxP parameters */
1368 proc_data->drect.left = proc_data->srect.left = 0;
1369 proc_data->drect.top = proc_data->srect.top = 0;
1370 proc_data->drect.width = proc_data->srect.width = 0;
1371 proc_data->drect.height = proc_data->srect.height = 0;
1372 proc_data->scaling = 0;
1373 proc_data->hflip = 0;
1374 proc_data->vflip = 0;
1375 proc_data->rotate = 0;
1376 proc_data->bgcolor = 0;
1378 /* Initialize S0 channel parameters */
1379 pxp_conf->s0_param.pixel_fmt = pxp_s0_formats[0];
1380 pxp_conf->s0_param.width = 0;
1381 pxp_conf->s0_param.height = 0;
1382 pxp_conf->s0_param.color_key = -1;
1383 pxp_conf->s0_param.color_key_enable = false;
1385 /* Initialize OL channel parameters */
1386 pxp_conf->ol_param[0].combine_enable = false;
1387 pxp_conf->ol_param[0].width = 0;
1388 pxp_conf->ol_param[0].height = 0;
1389 pxp_conf->ol_param[0].pixel_fmt = PXP_PIX_FMT_RGB565;
1390 pxp_conf->ol_param[0].color_key_enable = false;
1391 pxp_conf->ol_param[0].color_key = -1;
1392 pxp_conf->ol_param[0].global_alpha_enable = false;
1393 pxp_conf->ol_param[0].global_alpha = 0;
1394 pxp_conf->ol_param[0].local_alpha_enable = false;
1396 /* Initialize Output channel parameters */
1397 pxp_conf->out_param.width = 0;
1398 pxp_conf->out_param.height = 0;
1399 pxp_conf->out_param.pixel_fmt = PXP_PIX_FMT_RGB565;
1401 proc_data->overlay_state = 0;
1403 /* Write default h/w config */
1405 pxp_set_s0param(pxp);
1406 pxp_set_s0crop(pxp);
1408 * simply program the ULC to a higher value than the LRC
1409 * to avoid any AS pixels to show up in the output buffer.
1411 __raw_writel(0xFFFFFFFF, pxp->base + HW_PXP_OUT_AS_ULC);
1412 pxp_set_olparam(0, pxp);
1413 pxp_set_olcolorkey(0, pxp);
1415 pxp_set_s0colorkey(pxp);
1420 /* One-time histogram configuration */
1422 BF_PXP_HIST_CTRL_PANEL_MODE(BV_PXP_HIST_CTRL_PANEL_MODE__GRAY16);
1423 __raw_writel(reg_val, pxp->base + HW_PXP_HIST_CTRL);
1425 reg_val = BF_PXP_HIST2_PARAM_VALUE0(0x00) |
1426 BF_PXP_HIST2_PARAM_VALUE1(0x00F);
1427 __raw_writel(reg_val, pxp->base + HW_PXP_HIST2_PARAM);
1429 reg_val = BF_PXP_HIST4_PARAM_VALUE0(0x00) |
1430 BF_PXP_HIST4_PARAM_VALUE1(0x05) |
1431 BF_PXP_HIST4_PARAM_VALUE2(0x0A) | BF_PXP_HIST4_PARAM_VALUE3(0x0F);
1432 __raw_writel(reg_val, pxp->base + HW_PXP_HIST4_PARAM);
1434 reg_val = BF_PXP_HIST8_PARAM0_VALUE0(0x00) |
1435 BF_PXP_HIST8_PARAM0_VALUE1(0x02) |
1436 BF_PXP_HIST8_PARAM0_VALUE2(0x04) | BF_PXP_HIST8_PARAM0_VALUE3(0x06);
1437 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM0);
1438 reg_val = BF_PXP_HIST8_PARAM1_VALUE4(0x09) |
1439 BF_PXP_HIST8_PARAM1_VALUE5(0x0B) |
1440 BF_PXP_HIST8_PARAM1_VALUE6(0x0D) | BF_PXP_HIST8_PARAM1_VALUE7(0x0F);
1441 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM1);
1443 reg_val = BF_PXP_HIST16_PARAM0_VALUE0(0x00) |
1444 BF_PXP_HIST16_PARAM0_VALUE1(0x01) |
1445 BF_PXP_HIST16_PARAM0_VALUE2(0x02) |
1446 BF_PXP_HIST16_PARAM0_VALUE3(0x03);
1447 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM0);
1448 reg_val = BF_PXP_HIST16_PARAM1_VALUE4(0x04) |
1449 BF_PXP_HIST16_PARAM1_VALUE5(0x05) |
1450 BF_PXP_HIST16_PARAM1_VALUE6(0x06) |
1451 BF_PXP_HIST16_PARAM1_VALUE7(0x07);
1452 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM1);
1453 reg_val = BF_PXP_HIST16_PARAM2_VALUE8(0x08) |
1454 BF_PXP_HIST16_PARAM2_VALUE9(0x09) |
1455 BF_PXP_HIST16_PARAM2_VALUE10(0x0A) |
1456 BF_PXP_HIST16_PARAM2_VALUE11(0x0B);
1457 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM2);
1458 reg_val = BF_PXP_HIST16_PARAM3_VALUE12(0x0C) |
1459 BF_PXP_HIST16_PARAM3_VALUE13(0x0D) |
1460 BF_PXP_HIST16_PARAM3_VALUE14(0x0E) |
1461 BF_PXP_HIST16_PARAM3_VALUE15(0x0F);
1462 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM3);
1467 static int pxp_dma_init(struct pxps *pxp)
1469 struct pxp_dma *pxp_dma = &pxp->pxp_dma;
1470 struct dma_device *dma = &pxp_dma->dma;
1473 dma_cap_set(DMA_SLAVE, dma->cap_mask);
1474 dma_cap_set(DMA_PRIVATE, dma->cap_mask);
1476 /* Compulsory common fields */
1477 dma->dev = pxp->dev;
1478 dma->device_alloc_chan_resources = pxp_alloc_chan_resources;
1479 dma->device_free_chan_resources = pxp_free_chan_resources;
1480 dma->device_tx_status = pxp_tx_status;
1481 dma->device_issue_pending = pxp_issue_pending;
1483 /* Compulsory for DMA_SLAVE fields */
1484 dma->device_prep_slave_sg = pxp_prep_slave_sg;
1485 dma->device_control = pxp_control;
1487 /* Initialize PxP Channels */
1488 INIT_LIST_HEAD(&dma->channels);
1489 for (i = 0; i < NR_PXP_VIRT_CHANNEL; i++) {
1490 struct pxp_channel *pxp_chan = pxp->channel + i;
1491 struct dma_chan *dma_chan = &pxp_chan->dma_chan;
1493 spin_lock_init(&pxp_chan->lock);
1494 mutex_init(&pxp_chan->chan_mutex);
1496 /* Only one EOF IRQ for PxP, shared by all channels */
1497 pxp_chan->eof_irq = pxp->irq;
1498 pxp_chan->status = PXP_CHANNEL_FREE;
1499 pxp_chan->completed = -ENXIO;
1500 snprintf(pxp_chan->eof_name, sizeof(pxp_chan->eof_name),
1503 dma_chan->device = &pxp_dma->dma;
1504 dma_chan->cookie = 1;
1505 dma_chan->chan_id = i;
1506 list_add_tail(&dma_chan->device_node, &dma->channels);
1509 return dma_async_device_register(&pxp_dma->dma);
1512 static ssize_t clk_off_timeout_show(struct device *dev,
1513 struct device_attribute *attr, char *buf)
1515 return sprintf(buf, "%d\n", timeout_in_ms);
1518 static ssize_t clk_off_timeout_store(struct device *dev,
1519 struct device_attribute *attr,
1520 const char *buf, size_t count)
1523 if (sscanf(buf, "%d", &val) > 0) {
1524 timeout_in_ms = val;
1530 static DEVICE_ATTR(clk_off_timeout, 0644, clk_off_timeout_show,
1531 clk_off_timeout_store);
1533 static const struct of_device_id imx_pxpdma_dt_ids[] = {
1534 { .compatible = "fsl,imx6dl-pxp-dma", },
1537 MODULE_DEVICE_TABLE(of, imx_pxpdma_dt_ids);
1539 static int pxp_probe(struct platform_device *pdev)
1542 struct resource *res;
1546 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1547 irq = platform_get_irq(pdev, 0);
1548 if (!res || irq < 0) {
1553 pxp = devm_kzalloc(&pdev->dev, sizeof(*pxp), GFP_KERNEL);
1555 dev_err(&pdev->dev, "failed to allocate control object\n");
1560 pxp->dev = &pdev->dev;
1562 platform_set_drvdata(pdev, pxp);
1565 pxp->pxp_ongoing = 0;
1568 spin_lock_init(&pxp->lock);
1569 mutex_init(&pxp->clk_mutex);
1571 pxp->base = devm_request_and_ioremap(&pdev->dev, res);
1572 if (pxp->base == NULL) {
1573 dev_err(&pdev->dev, "Couldn't ioremap regs\n");
1580 pxp->clk = devm_clk_get(&pdev->dev, "pxp-axi");
1581 clk_prepare_enable(pxp->clk);
1583 err = pxp_hw_init(pxp);
1584 clk_disable_unprepare(pxp->clk);
1586 dev_err(&pdev->dev, "failed to initialize hardware\n");
1590 err = devm_request_irq(&pdev->dev, pxp->irq, pxp_irq, 0,
1591 "pxp-dmaengine", pxp);
1594 /* Initialize DMA engine */
1595 err = pxp_dma_init(pxp);
1599 if (device_create_file(&pdev->dev, &dev_attr_clk_off_timeout)) {
1601 "Unable to create file from clk_off_timeout\n");
1606 INIT_WORK(&pxp->work, clkoff_callback);
1607 init_waitqueue_head(&pxp->done);
1608 init_timer(&pxp->clk_timer);
1609 pxp->clk_timer.function = pxp_clkoff_timer;
1610 pxp->clk_timer.data = (unsigned long)pxp;
1612 register_pxp_device();
1616 dev_err(&pdev->dev, "Exiting (unsuccessfully) pxp_probe()\n");
1620 static int pxp_remove(struct platform_device *pdev)
1622 struct pxps *pxp = platform_get_drvdata(pdev);
1624 unregister_pxp_device();
1625 cancel_work_sync(&pxp->work);
1626 del_timer_sync(&pxp->clk_timer);
1627 clk_disable_unprepare(pxp->clk);
1628 device_remove_file(&pdev->dev, &dev_attr_clk_off_timeout);
#ifdef CONFIG_PM
/*
 * Suspend: wait for any in-flight operation to finish, then hold the PxP
 * in soft reset so it draws no power while suspended.
 */
static int pxp_suspend(struct platform_device *pdev, pm_message_t state)
{
	struct pxps *pxp = platform_get_drvdata(pdev);

	pxp_clk_enable(pxp);
	/* Busy-wait for the current operation to complete. */
	while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
		;

	__raw_writel(BM_PXP_CTRL_SFTRST, pxp->base + HW_PXP_CTRL);
	pxp_clk_disable(pxp);

	return 0;
}

/* Resume: release the soft reset asserted by pxp_suspend(). */
static int pxp_resume(struct platform_device *pdev)
{
	struct pxps *pxp = platform_get_drvdata(pdev);

	pxp_clk_enable(pxp);
	/* Pull PxP out of reset */
	__raw_writel(0, pxp->base + HW_PXP_CTRL);
	pxp_clk_disable(pxp);

	return 0;
}
#else
#define pxp_suspend NULL
#define pxp_resume NULL
#endif
1664 static struct platform_driver pxp_driver = {
1667 .of_match_table = of_match_ptr(imx_pxpdma_dt_ids),
1670 .remove = pxp_remove,
1671 .suspend = pxp_suspend,
1672 .resume = pxp_resume,
1675 module_platform_driver(pxp_driver);
1678 MODULE_DESCRIPTION("i.MX PxP driver");
1679 MODULE_AUTHOR("Freescale Semiconductor, Inc.");
1680 MODULE_LICENSE("GPL");