/*
 * Copyright (C) 2010-2013 Freescale Semiconductor, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 *
 * Based on STMP378X PxP driver
 * Copyright 2008-2009 Embedded Alley Solutions, Inc All Rights Reserved.
 */
24 #include <linux/dma-mapping.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mutex.h>
31 #include <linux/platform_device.h>
32 #include <linux/slab.h>
33 #include <linux/vmalloc.h>
34 #include <linux/dmaengine.h>
35 #include <linux/pxp_dma.h>
36 #include <linux/timer.h>
37 #include <linux/clk.h>
38 #include <linux/workqueue.h>
39 #include <linux/sched.h>
41 #include <linux/kthread.h>
43 #include "regs-pxp_v2.h"
/* Minimum downscale ratio the PS scaler supports (12.4 fixed point, 1/4). */
#define PXP_DOWNSCALE_THRESHOLD		0x4000

/* List of pending PxP transactions, shared across channels. */
static LIST_HEAD(head);
/* Delay before the clock-off timer fires when the PxP goes idle. */
static int timeout_in_ms = 600;
/* 0 = 8x8 processing block, 1 = 16x16 (shifted into HW_PXP_CTRL bit 23). */
static unsigned int block_size;
52 struct dma_device dma;
56 struct platform_device *pdev;
59 int irq; /* PXP IRQ to the CPU */
62 struct mutex clk_mutex;
64 #define CLK_STAT_OFF 0
70 struct pxp_dma pxp_dma;
71 struct pxp_channel channel[NR_PXP_VIRT_CHANNEL];
72 struct work_struct work;
74 /* describes most recent processing configuration */
75 struct pxp_config_data pxp_conf_state;
77 /* to turn clock off when pxp is inactive */
78 struct timer_list clk_timer;
80 /* for pxp config dispatch asynchronously*/
81 struct task_struct *dispatch;
82 wait_queue_head_t thread_waitq;
83 struct completion complete;
/* Navigate from the embedded dmaengine structures back to our wrappers. */
#define to_pxp_dma(d)		container_of(d, struct pxp_dma, dma)
#define to_tx_desc(tx)		container_of(tx, struct pxp_tx_desc, txd)
#define to_pxp_channel(d)	container_of(d, struct pxp_channel, dma_chan)
#define to_pxp(id)		container_of(id, struct pxps, pxp_dma)

/* Default number of tx descriptors pre-allocated per channel. */
#define PXP_DEF_BUFS	2
94 static uint32_t pxp_s0_formats[] = {
103 * PXP common functions
105 static void dump_pxp_reg(struct pxps *pxp)
107 dev_dbg(pxp->dev, "PXP_CTRL 0x%x",
108 __raw_readl(pxp->base + HW_PXP_CTRL));
109 dev_dbg(pxp->dev, "PXP_STAT 0x%x",
110 __raw_readl(pxp->base + HW_PXP_STAT));
111 dev_dbg(pxp->dev, "PXP_OUT_CTRL 0x%x",
112 __raw_readl(pxp->base + HW_PXP_OUT_CTRL));
113 dev_dbg(pxp->dev, "PXP_OUT_BUF 0x%x",
114 __raw_readl(pxp->base + HW_PXP_OUT_BUF));
115 dev_dbg(pxp->dev, "PXP_OUT_BUF2 0x%x",
116 __raw_readl(pxp->base + HW_PXP_OUT_BUF2));
117 dev_dbg(pxp->dev, "PXP_OUT_PITCH 0x%x",
118 __raw_readl(pxp->base + HW_PXP_OUT_PITCH));
119 dev_dbg(pxp->dev, "PXP_OUT_LRC 0x%x",
120 __raw_readl(pxp->base + HW_PXP_OUT_LRC));
121 dev_dbg(pxp->dev, "PXP_OUT_PS_ULC 0x%x",
122 __raw_readl(pxp->base + HW_PXP_OUT_PS_ULC));
123 dev_dbg(pxp->dev, "PXP_OUT_PS_LRC 0x%x",
124 __raw_readl(pxp->base + HW_PXP_OUT_PS_LRC));
125 dev_dbg(pxp->dev, "PXP_OUT_AS_ULC 0x%x",
126 __raw_readl(pxp->base + HW_PXP_OUT_AS_ULC));
127 dev_dbg(pxp->dev, "PXP_OUT_AS_LRC 0x%x",
128 __raw_readl(pxp->base + HW_PXP_OUT_AS_LRC));
129 dev_dbg(pxp->dev, "PXP_PS_CTRL 0x%x",
130 __raw_readl(pxp->base + HW_PXP_PS_CTRL));
131 dev_dbg(pxp->dev, "PXP_PS_BUF 0x%x",
132 __raw_readl(pxp->base + HW_PXP_PS_BUF));
133 dev_dbg(pxp->dev, "PXP_PS_UBUF 0x%x",
134 __raw_readl(pxp->base + HW_PXP_PS_UBUF));
135 dev_dbg(pxp->dev, "PXP_PS_VBUF 0x%x",
136 __raw_readl(pxp->base + HW_PXP_PS_VBUF));
137 dev_dbg(pxp->dev, "PXP_PS_PITCH 0x%x",
138 __raw_readl(pxp->base + HW_PXP_PS_PITCH));
139 dev_dbg(pxp->dev, "PXP_PS_BACKGROUND 0x%x",
140 __raw_readl(pxp->base + HW_PXP_PS_BACKGROUND));
141 dev_dbg(pxp->dev, "PXP_PS_SCALE 0x%x",
142 __raw_readl(pxp->base + HW_PXP_PS_SCALE));
143 dev_dbg(pxp->dev, "PXP_PS_OFFSET 0x%x",
144 __raw_readl(pxp->base + HW_PXP_PS_OFFSET));
145 dev_dbg(pxp->dev, "PXP_PS_CLRKEYLOW 0x%x",
146 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYLOW));
147 dev_dbg(pxp->dev, "PXP_PS_CLRKEYHIGH 0x%x",
148 __raw_readl(pxp->base + HW_PXP_PS_CLRKEYHIGH));
149 dev_dbg(pxp->dev, "PXP_AS_CTRL 0x%x",
150 __raw_readl(pxp->base + HW_PXP_AS_CTRL));
151 dev_dbg(pxp->dev, "PXP_AS_BUF 0x%x",
152 __raw_readl(pxp->base + HW_PXP_AS_BUF));
153 dev_dbg(pxp->dev, "PXP_AS_PITCH 0x%x",
154 __raw_readl(pxp->base + HW_PXP_AS_PITCH));
155 dev_dbg(pxp->dev, "PXP_AS_CLRKEYLOW 0x%x",
156 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYLOW));
157 dev_dbg(pxp->dev, "PXP_AS_CLRKEYHIGH 0x%x",
158 __raw_readl(pxp->base + HW_PXP_AS_CLRKEYHIGH));
159 dev_dbg(pxp->dev, "PXP_CSC1_COEF0 0x%x",
160 __raw_readl(pxp->base + HW_PXP_CSC1_COEF0));
161 dev_dbg(pxp->dev, "PXP_CSC1_COEF1 0x%x",
162 __raw_readl(pxp->base + HW_PXP_CSC1_COEF1));
163 dev_dbg(pxp->dev, "PXP_CSC1_COEF2 0x%x",
164 __raw_readl(pxp->base + HW_PXP_CSC1_COEF2));
165 dev_dbg(pxp->dev, "PXP_CSC2_CTRL 0x%x",
166 __raw_readl(pxp->base + HW_PXP_CSC2_CTRL));
167 dev_dbg(pxp->dev, "PXP_CSC2_COEF0 0x%x",
168 __raw_readl(pxp->base + HW_PXP_CSC2_COEF0));
169 dev_dbg(pxp->dev, "PXP_CSC2_COEF1 0x%x",
170 __raw_readl(pxp->base + HW_PXP_CSC2_COEF1));
171 dev_dbg(pxp->dev, "PXP_CSC2_COEF2 0x%x",
172 __raw_readl(pxp->base + HW_PXP_CSC2_COEF2));
173 dev_dbg(pxp->dev, "PXP_CSC2_COEF3 0x%x",
174 __raw_readl(pxp->base + HW_PXP_CSC2_COEF3));
175 dev_dbg(pxp->dev, "PXP_CSC2_COEF4 0x%x",
176 __raw_readl(pxp->base + HW_PXP_CSC2_COEF4));
177 dev_dbg(pxp->dev, "PXP_CSC2_COEF5 0x%x",
178 __raw_readl(pxp->base + HW_PXP_CSC2_COEF5));
179 dev_dbg(pxp->dev, "PXP_LUT_CTRL 0x%x",
180 __raw_readl(pxp->base + HW_PXP_LUT_CTRL));
181 dev_dbg(pxp->dev, "PXP_LUT_ADDR 0x%x",
182 __raw_readl(pxp->base + HW_PXP_LUT_ADDR));
183 dev_dbg(pxp->dev, "PXP_LUT_DATA 0x%x",
184 __raw_readl(pxp->base + HW_PXP_LUT_DATA));
185 dev_dbg(pxp->dev, "PXP_LUT_EXTMEM 0x%x",
186 __raw_readl(pxp->base + HW_PXP_LUT_EXTMEM));
187 dev_dbg(pxp->dev, "PXP_CFA 0x%x",
188 __raw_readl(pxp->base + HW_PXP_CFA));
189 dev_dbg(pxp->dev, "PXP_HIST_CTRL 0x%x",
190 __raw_readl(pxp->base + HW_PXP_HIST_CTRL));
191 dev_dbg(pxp->dev, "PXP_HIST2_PARAM 0x%x",
192 __raw_readl(pxp->base + HW_PXP_HIST2_PARAM));
193 dev_dbg(pxp->dev, "PXP_HIST4_PARAM 0x%x",
194 __raw_readl(pxp->base + HW_PXP_HIST4_PARAM));
195 dev_dbg(pxp->dev, "PXP_HIST8_PARAM0 0x%x",
196 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM0));
197 dev_dbg(pxp->dev, "PXP_HIST8_PARAM1 0x%x",
198 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM1));
199 dev_dbg(pxp->dev, "PXP_HIST16_PARAM0 0x%x",
200 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM0));
201 dev_dbg(pxp->dev, "PXP_HIST16_PARAM1 0x%x",
202 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM1));
203 dev_dbg(pxp->dev, "PXP_HIST16_PARAM2 0x%x",
204 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM2));
205 dev_dbg(pxp->dev, "PXP_HIST16_PARAM3 0x%x",
206 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM3));
207 dev_dbg(pxp->dev, "PXP_POWER 0x%x",
208 __raw_readl(pxp->base + HW_PXP_POWER));
209 dev_dbg(pxp->dev, "PXP_NEXT 0x%x",
210 __raw_readl(pxp->base + HW_PXP_NEXT));
211 dev_dbg(pxp->dev, "PXP_DEBUGCTRL 0x%x",
212 __raw_readl(pxp->base + HW_PXP_DEBUGCTRL));
213 dev_dbg(pxp->dev, "PXP_DEBUG 0x%x",
214 __raw_readl(pxp->base + HW_PXP_DEBUG));
215 dev_dbg(pxp->dev, "PXP_VERSION 0x%x",
216 __raw_readl(pxp->base + HW_PXP_VERSION));
219 static bool is_yuv(u32 pix_fmt)
221 if ((pix_fmt == PXP_PIX_FMT_YUYV) |
222 (pix_fmt == PXP_PIX_FMT_UYVY) |
223 (pix_fmt == PXP_PIX_FMT_YVYU) |
224 (pix_fmt == PXP_PIX_FMT_VYUY) |
225 (pix_fmt == PXP_PIX_FMT_Y41P) |
226 (pix_fmt == PXP_PIX_FMT_YUV444) |
227 (pix_fmt == PXP_PIX_FMT_NV12) |
228 (pix_fmt == PXP_PIX_FMT_NV16) |
229 (pix_fmt == PXP_PIX_FMT_NV61) |
230 (pix_fmt == PXP_PIX_FMT_GREY) |
231 (pix_fmt == PXP_PIX_FMT_GY04) |
232 (pix_fmt == PXP_PIX_FMT_YVU410P) |
233 (pix_fmt == PXP_PIX_FMT_YUV410P) |
234 (pix_fmt == PXP_PIX_FMT_YVU420P) |
235 (pix_fmt == PXP_PIX_FMT_YUV420P) |
236 (pix_fmt == PXP_PIX_FMT_YUV420P2) |
237 (pix_fmt == PXP_PIX_FMT_YVU422P) |
238 (pix_fmt == PXP_PIX_FMT_YUV422P)) {
245 static void pxp_set_ctrl(struct pxps *pxp)
247 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
248 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
251 int need_swap = 0; /* to support YUYV and YVYU formats */
253 /* Configure S0 input format */
254 switch (pxp_conf->s0_param.pixel_fmt) {
255 case PXP_PIX_FMT_RGB32:
256 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB888;
258 case PXP_PIX_FMT_RGB565:
259 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB565;
261 case PXP_PIX_FMT_RGB555:
262 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__RGB555;
264 case PXP_PIX_FMT_YUV420P:
265 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV420;
267 case PXP_PIX_FMT_YVU420P:
268 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV420;
270 case PXP_PIX_FMT_GREY:
271 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y8;
273 case PXP_PIX_FMT_GY04:
274 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__Y4;
276 case PXP_PIX_FMT_YUV422P:
277 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV422;
279 case PXP_PIX_FMT_UYVY:
280 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__UYVY1P422;
282 case PXP_PIX_FMT_YUYV:
283 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__UYVY1P422;
286 case PXP_PIX_FMT_VYUY:
287 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__VYUY1P422;
289 case PXP_PIX_FMT_YVYU:
290 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__VYUY1P422;
293 case PXP_PIX_FMT_NV12:
294 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV2P420;
296 case PXP_PIX_FMT_NV21:
297 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YVU2P420;
299 case PXP_PIX_FMT_NV16:
300 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YUV2P422;
302 case PXP_PIX_FMT_NV61:
303 fmt_ctrl = BV_PXP_PS_CTRL_FORMAT__YVU2P422;
309 ctrl = BF_PXP_PS_CTRL_FORMAT(fmt_ctrl) | BF_PXP_PS_CTRL_SWAP(need_swap);
310 __raw_writel(ctrl, pxp->base + HW_PXP_PS_CTRL_SET);
312 /* Configure output format based on out_channel format */
313 switch (pxp_conf->out_param.pixel_fmt) {
314 case PXP_PIX_FMT_RGB32:
315 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB888;
317 case PXP_PIX_FMT_BGRA32:
318 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__ARGB8888;
320 case PXP_PIX_FMT_RGB24:
321 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB888P;
323 case PXP_PIX_FMT_RGB565:
324 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB565;
326 case PXP_PIX_FMT_RGB555:
327 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__RGB555;
329 case PXP_PIX_FMT_GREY:
330 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y8;
332 case PXP_PIX_FMT_GY04:
333 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__Y4;
335 case PXP_PIX_FMT_UYVY:
336 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__UYVY1P422;
338 case PXP_PIX_FMT_VYUY:
339 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__VYUY1P422;
341 case PXP_PIX_FMT_NV12:
342 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P420;
344 case PXP_PIX_FMT_NV21:
345 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YVU2P420;
347 case PXP_PIX_FMT_NV16:
348 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YUV2P422;
350 case PXP_PIX_FMT_NV61:
351 fmt_ctrl = BV_PXP_OUT_CTRL_FORMAT__YVU2P422;
357 ctrl = BF_PXP_OUT_CTRL_FORMAT(fmt_ctrl);
358 __raw_writel(ctrl, pxp->base + HW_PXP_OUT_CTRL);
361 if (proc_data->scaling)
363 if (proc_data->vflip)
364 ctrl |= BM_PXP_CTRL_VFLIP;
365 if (proc_data->hflip)
366 ctrl |= BM_PXP_CTRL_HFLIP;
367 if (proc_data->rotate) {
368 ctrl |= BF_PXP_CTRL_ROTATE(proc_data->rotate / 90);
369 if (proc_data->rot_pos)
370 ctrl |= BM_PXP_CTRL_ROT_POS;
373 /* In default, the block size is set to 8x8
374 * But block size can be set to 16x16 due to
375 * blocksize variable modification
377 ctrl |= block_size << 23;
379 __raw_writel(ctrl, pxp->base + HW_PXP_CTRL);
382 static int pxp_start(struct pxps *pxp)
384 __raw_writel(BM_PXP_CTRL_IRQ_ENABLE, pxp->base + HW_PXP_CTRL_SET);
385 __raw_writel(BM_PXP_CTRL_ENABLE, pxp->base + HW_PXP_CTRL_SET);
391 static void pxp_set_outbuf(struct pxps *pxp)
393 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
394 struct pxp_layer_param *out_params = &pxp_conf->out_param;
396 __raw_writel(out_params->paddr, pxp->base + HW_PXP_OUT_BUF);
398 __raw_writel(BF_PXP_OUT_LRC_X(out_params->width - 1) |
399 BF_PXP_OUT_LRC_Y(out_params->height - 1),
400 pxp->base + HW_PXP_OUT_LRC);
402 if (out_params->pixel_fmt == PXP_PIX_FMT_RGB24) {
403 __raw_writel(out_params->stride * 3,
404 pxp->base + HW_PXP_OUT_PITCH);
405 } else if (out_params->pixel_fmt == PXP_PIX_FMT_BGRA32 ||
406 out_params->pixel_fmt == PXP_PIX_FMT_RGB32) {
407 __raw_writel(out_params->stride << 2,
408 pxp->base + HW_PXP_OUT_PITCH);
409 } else if (out_params->pixel_fmt == PXP_PIX_FMT_RGB565) {
410 __raw_writel(out_params->stride << 1,
411 pxp->base + HW_PXP_OUT_PITCH);
412 } else if (out_params->pixel_fmt == PXP_PIX_FMT_UYVY ||
413 (out_params->pixel_fmt == PXP_PIX_FMT_VYUY)) {
414 __raw_writel(out_params->stride << 1,
415 pxp->base + HW_PXP_OUT_PITCH);
416 } else if (out_params->pixel_fmt == PXP_PIX_FMT_GREY ||
417 out_params->pixel_fmt == PXP_PIX_FMT_NV12 ||
418 out_params->pixel_fmt == PXP_PIX_FMT_NV21 ||
419 out_params->pixel_fmt == PXP_PIX_FMT_NV16 ||
420 out_params->pixel_fmt == PXP_PIX_FMT_NV61) {
421 __raw_writel(out_params->stride,
422 pxp->base + HW_PXP_OUT_PITCH);
423 } else if (out_params->pixel_fmt == PXP_PIX_FMT_GY04) {
424 __raw_writel(out_params->stride >> 1,
425 pxp->base + HW_PXP_OUT_PITCH);
427 __raw_writel(0, pxp->base + HW_PXP_OUT_PITCH);
430 /* set global alpha if necessary */
431 if (out_params->global_alpha_enable) {
432 __raw_writel(out_params->global_alpha << 24,
433 pxp->base + HW_PXP_OUT_CTRL_SET);
434 __raw_writel(BM_PXP_OUT_CTRL_ALPHA_OUTPUT,
435 pxp->base + HW_PXP_OUT_CTRL_SET);
439 static void pxp_set_s0colorkey(struct pxps *pxp)
441 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
442 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
444 /* Low and high are set equal. V4L does not allow a chromakey range */
445 if (s0_params->color_key_enable == 0 || s0_params->color_key == -1) {
446 /* disable color key */
447 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_PS_CLRKEYLOW);
448 __raw_writel(0, pxp->base + HW_PXP_PS_CLRKEYHIGH);
450 __raw_writel(s0_params->color_key,
451 pxp->base + HW_PXP_PS_CLRKEYLOW);
452 __raw_writel(s0_params->color_key,
453 pxp->base + HW_PXP_PS_CLRKEYHIGH);
457 static void pxp_set_olcolorkey(int layer_no, struct pxps *pxp)
459 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
460 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[layer_no];
462 /* Low and high are set equal. V4L does not allow a chromakey range */
463 if (ol_params->color_key_enable != 0 && ol_params->color_key != -1) {
464 __raw_writel(ol_params->color_key,
465 pxp->base + HW_PXP_AS_CLRKEYLOW);
466 __raw_writel(ol_params->color_key,
467 pxp->base + HW_PXP_AS_CLRKEYHIGH);
469 /* disable color key */
470 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_AS_CLRKEYLOW);
471 __raw_writel(0, pxp->base + HW_PXP_AS_CLRKEYHIGH);
475 static void pxp_set_oln(int layer_no, struct pxps *pxp)
477 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
478 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
479 dma_addr_t phys_addr = olparams_data->paddr;
480 u32 pitch = olparams_data->stride ? olparams_data->stride :
481 olparams_data->width;
483 __raw_writel(phys_addr, pxp->base + HW_PXP_AS_BUF);
486 if (olparams_data->width == 0 && olparams_data->height == 0) {
487 __raw_writel(0xffffffff, pxp->base + HW_PXP_OUT_AS_ULC);
488 __raw_writel(0x0, pxp->base + HW_PXP_OUT_AS_LRC);
490 __raw_writel(0x0, pxp->base + HW_PXP_OUT_AS_ULC);
491 if (pxp_conf->proc_data.rotate == 90 ||
492 pxp_conf->proc_data.rotate == 270) {
493 if (pxp_conf->proc_data.rot_pos == 1) {
494 __raw_writel(BF_PXP_OUT_AS_LRC_X(olparams_data->height - 1) |
495 BF_PXP_OUT_AS_LRC_Y(olparams_data->width - 1),
496 pxp->base + HW_PXP_OUT_AS_LRC);
498 __raw_writel(BF_PXP_OUT_AS_LRC_X(olparams_data->width - 1) |
499 BF_PXP_OUT_AS_LRC_Y(olparams_data->height - 1),
500 pxp->base + HW_PXP_OUT_AS_LRC);
503 __raw_writel(BF_PXP_OUT_AS_LRC_X(olparams_data->width - 1) |
504 BF_PXP_OUT_AS_LRC_Y(olparams_data->height - 1),
505 pxp->base + HW_PXP_OUT_AS_LRC);
509 if ((olparams_data->pixel_fmt == PXP_PIX_FMT_BGRA32) |
510 (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB32)) {
511 __raw_writel(pitch << 2,
512 pxp->base + HW_PXP_AS_PITCH);
513 } else if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB565) {
514 __raw_writel(pitch << 1,
515 pxp->base + HW_PXP_AS_PITCH);
517 __raw_writel(0, pxp->base + HW_PXP_AS_PITCH);
521 static void pxp_set_olparam(int layer_no, struct pxps *pxp)
523 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
524 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
527 olparam = BF_PXP_AS_CTRL_ALPHA(olparams_data->global_alpha);
528 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB32) {
530 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB888);
531 } else if (olparams_data->pixel_fmt == PXP_PIX_FMT_BGRA32) {
533 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__ARGB8888);
534 if (!olparams_data->combine_enable) {
536 BF_PXP_AS_CTRL_ALPHA_CTRL
537 (BV_PXP_AS_CTRL_ALPHA_CTRL__ROPs);
538 olparam |= 0x3 << 16;
540 } else if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB565) {
542 BF_PXP_AS_CTRL_FORMAT(BV_PXP_AS_CTRL_FORMAT__RGB565);
544 if (olparams_data->global_alpha_enable) {
545 if (olparams_data->global_override) {
547 BF_PXP_AS_CTRL_ALPHA_CTRL
548 (BV_PXP_AS_CTRL_ALPHA_CTRL__Override);
551 BF_PXP_AS_CTRL_ALPHA_CTRL
552 (BV_PXP_AS_CTRL_ALPHA_CTRL__Multiply);
554 if (olparams_data->alpha_invert)
555 olparam |= BM_PXP_AS_CTRL_ALPHA_INVERT;
557 if (olparams_data->color_key_enable)
558 olparam |= BM_PXP_AS_CTRL_ENABLE_COLORKEY;
560 __raw_writel(olparam, pxp->base + HW_PXP_AS_CTRL);
563 static void pxp_set_s0param(struct pxps *pxp)
565 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
566 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
569 /* contains the coordinate for the PS in the OUTPUT buffer. */
570 if ((pxp_conf->s0_param).width == 0 &&
571 (pxp_conf->s0_param).height == 0) {
572 __raw_writel(0xffffffff, pxp->base + HW_PXP_OUT_PS_ULC);
573 __raw_writel(0x0, pxp->base + HW_PXP_OUT_PS_LRC);
575 s0param = BF_PXP_OUT_PS_ULC_X(proc_data->drect.left);
576 s0param |= BF_PXP_OUT_PS_ULC_Y(proc_data->drect.top);
577 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_ULC);
578 s0param = BF_PXP_OUT_PS_LRC_X(proc_data->drect.left +
579 proc_data->drect.width - 1);
580 s0param |= BF_PXP_OUT_PS_LRC_Y(proc_data->drect.top +
581 proc_data->drect.height - 1);
582 __raw_writel(s0param, pxp->base + HW_PXP_OUT_PS_LRC);
/* crop behavior is re-designed in h/w. */
static void pxp_set_s0crop(struct pxps *pxp)
{
	/*
	 * place-holder, it's implemented in other functions in this driver.
	 * Refer to "Clipping source images" section in RM for detail.
	 */
}
595 static int pxp_set_scaling(struct pxps *pxp)
598 u32 xscale, yscale, s0scale;
599 u32 decx, decy, xdec = 0, ydec = 0;
600 struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
602 if (((proc_data->srect.width == proc_data->drect.width) &&
603 (proc_data->srect.height == proc_data->drect.height)) ||
604 ((proc_data->srect.width == 0) && (proc_data->srect.height == 0))) {
605 proc_data->scaling = 0;
606 __raw_writel(0x10001000, pxp->base + HW_PXP_PS_SCALE);
607 __raw_writel(0, pxp->base + HW_PXP_PS_CTRL);
611 proc_data->scaling = 1;
612 decx = proc_data->srect.width / proc_data->drect.width;
613 decy = proc_data->srect.height / proc_data->drect.height;
615 if (decx >= 2 && decx < 4) {
618 } else if (decx >= 4 && decx < 8) {
621 } else if (decx >= 8) {
625 xscale = proc_data->srect.width * 0x1000 /
626 (proc_data->drect.width * decx);
628 xscale = proc_data->srect.width * 0x1000 /
629 proc_data->drect.width;
631 if (decy >= 2 && decy < 4) {
634 } else if (decy >= 4 && decy < 8) {
637 } else if (decy >= 8) {
641 yscale = proc_data->srect.height * 0x1000 /
642 (proc_data->drect.height * decy);
644 yscale = proc_data->srect.height * 0x1000 /
645 proc_data->drect.height;
647 __raw_writel((xdec << 10) | (ydec << 8), pxp->base + HW_PXP_PS_CTRL);
649 if (xscale > PXP_DOWNSCALE_THRESHOLD)
650 xscale = PXP_DOWNSCALE_THRESHOLD;
651 if (yscale > PXP_DOWNSCALE_THRESHOLD)
652 yscale = PXP_DOWNSCALE_THRESHOLD;
653 s0scale = BF_PXP_PS_SCALE_YSCALE(yscale) |
654 BF_PXP_PS_SCALE_XSCALE(xscale);
655 __raw_writel(s0scale, pxp->base + HW_PXP_PS_SCALE);
663 static void pxp_set_bg(struct pxps *pxp)
665 __raw_writel(pxp->pxp_conf_state.proc_data.bgcolor,
666 pxp->base + HW_PXP_PS_BACKGROUND);
669 static void pxp_set_lut(struct pxps *pxp)
671 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
672 int lut_op = pxp_conf->proc_data.lut_transform;
675 bool use_cmap = (lut_op & PXP_LUT_USE_CMAP) ? true : false;
676 u8 *cmap = pxp_conf->proc_data.lut_map;
682 * If LUT already configured as needed, return...
683 * Unless CMAP is needed and it has been updated.
685 if ((pxp->lut_state == lut_op) &&
686 !(use_cmap && pxp_conf->proc_data.lut_map_updated))
689 if (lut_op == PXP_LUT_NONE) {
690 __raw_writel(BM_PXP_LUT_CTRL_BYPASS,
691 pxp->base + HW_PXP_LUT_CTRL);
692 } else if (((lut_op & PXP_LUT_INVERT) != 0)
693 && ((lut_op & PXP_LUT_BLACK_WHITE) != 0)) {
694 /* Fill out LUT table with inverted monochromized values */
696 /* clear bypass bit, set lookup mode & out mode */
697 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
698 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
699 BF_PXP_LUT_CTRL_OUT_MODE
700 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
701 pxp->base + HW_PXP_LUT_CTRL);
703 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
704 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
706 /* LUT address pointer auto-increments after each data write */
707 for (pix_val = 0; pix_val < 256; pix_val += 4) {
708 for (i = 0; i < 4; i++) {
709 entry_src = use_cmap ?
710 cmap[pix_val + i] : pix_val + i;
711 entry[i] = (entry_src < 0x80) ? 0xFF : 0x00;
713 reg_val = (entry[3] << 24) | (entry[2] << 16) |
714 (entry[1] << 8) | entry[0];
715 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
717 } else if ((lut_op & PXP_LUT_INVERT) != 0) {
718 /* Fill out LUT table with 8-bit inverted values */
720 /* clear bypass bit, set lookup mode & out mode */
721 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
722 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
723 BF_PXP_LUT_CTRL_OUT_MODE
724 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
725 pxp->base + HW_PXP_LUT_CTRL);
727 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
728 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
730 /* LUT address pointer auto-increments after each data write */
731 for (pix_val = 0; pix_val < 256; pix_val += 4) {
732 for (i = 0; i < 4; i++) {
733 entry_src = use_cmap ?
734 cmap[pix_val + i] : pix_val + i;
735 entry[i] = ~entry_src & 0xFF;
737 reg_val = (entry[3] << 24) | (entry[2] << 16) |
738 (entry[1] << 8) | entry[0];
739 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
741 } else if ((lut_op & PXP_LUT_BLACK_WHITE) != 0) {
742 /* Fill out LUT table with 8-bit monochromized values */
744 /* clear bypass bit, set lookup mode & out mode */
745 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
746 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
747 BF_PXP_LUT_CTRL_OUT_MODE
748 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
749 pxp->base + HW_PXP_LUT_CTRL);
751 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
752 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
754 /* LUT address pointer auto-increments after each data write */
755 for (pix_val = 0; pix_val < 256; pix_val += 4) {
756 for (i = 0; i < 4; i++) {
757 entry_src = use_cmap ?
758 cmap[pix_val + i] : pix_val + i;
759 entry[i] = (entry_src < 0x80) ? 0x00 : 0xFF;
761 reg_val = (entry[3] << 24) | (entry[2] << 16) |
762 (entry[1] << 8) | entry[0];
763 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
765 } else if (use_cmap) {
766 /* Fill out LUT table using colormap values */
768 /* clear bypass bit, set lookup mode & out mode */
769 __raw_writel(BF_PXP_LUT_CTRL_LOOKUP_MODE
770 (BV_PXP_LUT_CTRL_LOOKUP_MODE__DIRECT_Y8) |
771 BF_PXP_LUT_CTRL_OUT_MODE
772 (BV_PXP_LUT_CTRL_OUT_MODE__Y8),
773 pxp->base + HW_PXP_LUT_CTRL);
775 /* Initialize LUT address to 0 and set NUM_BYTES to 0 */
776 __raw_writel(0, pxp->base + HW_PXP_LUT_ADDR);
778 /* LUT address pointer auto-increments after each data write */
779 for (pix_val = 0; pix_val < 256; pix_val += 4) {
780 for (i = 0; i < 4; i++)
781 entry[i] = cmap[pix_val + i];
782 reg_val = (entry[3] << 24) | (entry[2] << 16) |
783 (entry[1] << 8) | entry[0];
784 __raw_writel(reg_val, pxp->base + HW_PXP_LUT_DATA);
788 pxp->lut_state = lut_op;
791 static void pxp_set_csc(struct pxps *pxp)
793 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
794 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
795 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[0];
796 struct pxp_layer_param *out_params = &pxp_conf->out_param;
798 bool input_is_YUV = is_yuv(s0_params->pixel_fmt);
799 bool output_is_YUV = is_yuv(out_params->pixel_fmt);
801 if (input_is_YUV && output_is_YUV) {
803 * Input = YUV, Output = YUV
804 * No CSC unless we need to do combining
806 if (ol_params->combine_enable) {
807 /* Must convert to RGB for combining with RGB overlay */
809 /* CSC1 - YUV->RGB */
810 __raw_writel(0x04030000, pxp->base + HW_PXP_CSC1_COEF0);
811 __raw_writel(0x01230208, pxp->base + HW_PXP_CSC1_COEF1);
812 __raw_writel(0x076b079c, pxp->base + HW_PXP_CSC1_COEF2);
814 /* CSC2 - RGB->YUV */
815 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
816 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
817 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
818 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
819 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
820 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
821 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
823 /* Input & Output both YUV, so bypass both CSCs */
826 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
829 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
831 } else if (input_is_YUV && !output_is_YUV) {
833 * Input = YUV, Output = RGB
834 * Use CSC1 to convert to RGB
837 /* CSC1 - YUV->RGB */
838 __raw_writel(0x84ab01f0, pxp->base + HW_PXP_CSC1_COEF0);
839 __raw_writel(0x01980204, pxp->base + HW_PXP_CSC1_COEF1);
840 __raw_writel(0x0730079c, pxp->base + HW_PXP_CSC1_COEF2);
843 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
844 } else if (!input_is_YUV && output_is_YUV) {
846 * Input = RGB, Output = YUV
847 * Use CSC2 to convert to YUV
851 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
853 /* CSC2 - RGB->YUV */
854 __raw_writel(0x4, pxp->base + HW_PXP_CSC2_CTRL);
855 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2_COEF0);
856 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2_COEF1);
857 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2_COEF2);
858 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2_COEF3);
859 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2_COEF4);
860 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2_COEF5);
863 * Input = RGB, Output = RGB
864 * Input & Output both RGB, so bypass both CSCs
868 __raw_writel(0x40000000, pxp->base + HW_PXP_CSC1_COEF0);
871 __raw_writel(0x1, pxp->base + HW_PXP_CSC2_CTRL);
874 /* YCrCb colorspace */
875 /* Not sure when we use this...no YCrCb formats are defined for PxP */
877 __raw_writel(0x84ab01f0, HW_PXP_CSCCOEFF0_ADDR);
878 __raw_writel(0x01230204, HW_PXP_CSCCOEFF1_ADDR);
879 __raw_writel(0x0730079c, HW_PXP_CSCCOEFF2_ADDR);
884 static void pxp_set_s0buf(struct pxps *pxp)
886 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
887 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
888 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
890 dma_addr_t Y1, U1, V1;
892 u32 pitch = s0_params->stride ? s0_params->stride :
895 Y = s0_params->paddr;
897 if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB565)
899 else if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB32)
901 offset = (proc_data->srect.top * s0_params->width +
902 proc_data->srect.left) * bpp;
903 /* clipping or cropping */
905 __raw_writel(Y1, pxp->base + HW_PXP_PS_BUF);
906 if ((s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P) ||
907 (s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P) ||
908 (s0_params->pixel_fmt == PXP_PIX_FMT_GREY) ||
909 (s0_params->pixel_fmt == PXP_PIX_FMT_YUV422P)) {
910 /* Set to 1 if YUV format is 4:2:2 rather than 4:2:0 */
912 if (s0_params->pixel_fmt == PXP_PIX_FMT_YUV422P)
915 offset = proc_data->srect.top * s0_params->width / 4 +
916 proc_data->srect.left / 2;
917 U = Y + (s0_params->width * s0_params->height);
919 V = U + ((s0_params->width * s0_params->height) >> s);
921 __raw_writel(U1, pxp->base + HW_PXP_PS_UBUF);
922 __raw_writel(V1, pxp->base + HW_PXP_PS_VBUF);
923 } else if ((s0_params->pixel_fmt == PXP_PIX_FMT_NV12) ||
924 (s0_params->pixel_fmt == PXP_PIX_FMT_NV21) ||
925 (s0_params->pixel_fmt == PXP_PIX_FMT_NV16) ||
926 (s0_params->pixel_fmt == PXP_PIX_FMT_NV61)) {
928 if ((s0_params->pixel_fmt == PXP_PIX_FMT_NV16) ||
929 (s0_params->pixel_fmt == PXP_PIX_FMT_NV61))
932 offset = (proc_data->srect.top * s0_params->width +
933 proc_data->srect.left) / s;
934 U = Y + (s0_params->width * s0_params->height);
937 __raw_writel(U1, pxp->base + HW_PXP_PS_UBUF);
940 /* TODO: only support RGB565, Y8, Y4, YUV420 */
941 if (s0_params->pixel_fmt == PXP_PIX_FMT_GREY ||
942 s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P ||
943 s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P ||
944 s0_params->pixel_fmt == PXP_PIX_FMT_NV12 ||
945 s0_params->pixel_fmt == PXP_PIX_FMT_NV21 ||
946 s0_params->pixel_fmt == PXP_PIX_FMT_NV16 ||
947 s0_params->pixel_fmt == PXP_PIX_FMT_NV61 ||
948 s0_params->pixel_fmt == PXP_PIX_FMT_YUV422P) {
949 __raw_writel(pitch, pxp->base + HW_PXP_PS_PITCH);
951 else if (s0_params->pixel_fmt == PXP_PIX_FMT_GY04)
952 __raw_writel(pitch >> 1,
953 pxp->base + HW_PXP_PS_PITCH);
954 else if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB32)
955 __raw_writel(pitch << 2,
956 pxp->base + HW_PXP_PS_PITCH);
957 else if (s0_params->pixel_fmt == PXP_PIX_FMT_UYVY ||
958 s0_params->pixel_fmt == PXP_PIX_FMT_YUYV ||
959 s0_params->pixel_fmt == PXP_PIX_FMT_VYUY ||
960 s0_params->pixel_fmt == PXP_PIX_FMT_YVYU)
961 __raw_writel(pitch << 1,
962 pxp->base + HW_PXP_PS_PITCH);
963 else if (s0_params->pixel_fmt == PXP_PIX_FMT_RGB565)
964 __raw_writel(pitch << 1,
965 pxp->base + HW_PXP_PS_PITCH);
967 __raw_writel(0, pxp->base + HW_PXP_PS_PITCH);
971 * pxp_config() - configure PxP for a processing task
972 * @pxps: PXP context.
973 * @pxp_chan: PXP channel.
974 * @return: 0 on success or negative error code on failure.
976 static int pxp_config(struct pxps *pxp, struct pxp_channel *pxp_chan)
978 struct pxp_config_data *pxp_conf_data = &pxp->pxp_conf_state;
982 /* Configure PxP regs */
984 pxp_set_s0param(pxp);
986 pxp_set_scaling(pxp);
987 ol_nr = pxp_conf_data->layer_nr - 2;
989 i = pxp_conf_data->layer_nr - 2 - ol_nr;
991 pxp_set_olparam(i, pxp);
992 /* only the color key in higher overlay will take effect. */
993 pxp_set_olcolorkey(i, pxp);
996 pxp_set_s0colorkey(pxp);
1002 pxp_set_outbuf(pxp);
1007 static void pxp_clk_enable(struct pxps *pxp)
1009 mutex_lock(&pxp->clk_mutex);
1011 if (pxp->clk_stat == CLK_STAT_ON) {
1012 mutex_unlock(&pxp->clk_mutex);
1016 clk_prepare_enable(pxp->clk);
1017 pxp->clk_stat = CLK_STAT_ON;
1019 mutex_unlock(&pxp->clk_mutex);
1022 static void pxp_clk_disable(struct pxps *pxp)
1024 unsigned long flags;
1026 mutex_lock(&pxp->clk_mutex);
1028 if (pxp->clk_stat == CLK_STAT_OFF) {
1029 mutex_unlock(&pxp->clk_mutex);
1033 spin_lock_irqsave(&pxp->lock, flags);
1034 if ((pxp->pxp_ongoing == 0) && list_empty(&head)) {
1035 spin_unlock_irqrestore(&pxp->lock, flags);
1036 clk_disable_unprepare(pxp->clk);
1037 pxp->clk_stat = CLK_STAT_OFF;
1039 spin_unlock_irqrestore(&pxp->lock, flags);
1041 mutex_unlock(&pxp->clk_mutex);
1044 static inline void clkoff_callback(struct work_struct *w)
1046 struct pxps *pxp = container_of(w, struct pxps, work);
1048 pxp_clk_disable(pxp);
/*
 * pxp_clkoff_timer() - periodic inactivity timer.  When the engine is
 * idle it schedules clkoff_callback() (clock ops may sleep; timer
 * context cannot call them directly), then re-arms itself for another
 * timeout_in_ms interval.
 */
1051 static void pxp_clkoff_timer(unsigned long arg)
1053 struct pxps *pxp = (struct pxps *)arg;
1055 if ((pxp->pxp_ongoing == 0) && list_empty(&head))
1056 schedule_work(&pxp->work);
1058 mod_timer(&pxp->clk_timer,
1059 jiffies + msecs_to_jiffies(timeout_in_ms));
1062 static struct pxp_tx_desc *pxpdma_first_active(struct pxp_channel *pxp_chan)
1064 return list_entry(pxp_chan->active_list.next, struct pxp_tx_desc, list);
1067 static struct pxp_tx_desc *pxpdma_first_queued(struct pxp_channel *pxp_chan)
1069 return list_entry(pxp_chan->queue.next, struct pxp_tx_desc, list);
1072 /* called with pxp_chan->lock held */
/*
 * __pxpdma_dostart() - snapshot the head-of-active-list transaction into
 * pxp->pxp_conf_state so pxp_config() can program the hardware from it.
 * The first (parent) descriptor carries S0 + proc_data; its tx_list
 * children carry the output layer first, then the overlay layers.
 */
1073 static void __pxpdma_dostart(struct pxp_channel *pxp_chan)
1075 struct pxp_dma *pxp_dma = to_pxp_dma(pxp_chan->dma_chan.device);
1076 struct pxps *pxp = to_pxp(pxp_dma);
1077 struct pxp_tx_desc *desc;
1078 struct pxp_tx_desc *child;
/* NOTE(review): declaration/initialization of 'i' is not visible in this
 * excerpt, nor its increment inside the loop - confirm in the full file. */
1081 /* so far we presume only one transaction on active_list */
1083 desc = pxpdma_first_active(pxp_chan);
1084 memcpy(&pxp->pxp_conf_state.s0_param,
1085 &desc->layer_param.s0_param, sizeof(struct pxp_layer_param));
1086 memcpy(&pxp->pxp_conf_state.proc_data,
1087 &desc->proc_data, sizeof(struct pxp_proc_data));
1089 /* Save PxP configuration */
1090 list_for_each_entry(child, &desc->tx_list, list) {
1091 if (i == 0) { /* Output */
1092 memcpy(&pxp->pxp_conf_state.out_param,
1093 &child->layer_param.out_param,
1094 sizeof(struct pxp_layer_param));
1095 } else { /* Overlay */
1096 memcpy(&pxp->pxp_conf_state.ol_param[i - 1],
1097 &child->layer_param.ol_param,
1098 sizeof(struct pxp_layer_param));
1103 pr_debug("%s:%d S0 w/h %d/%d paddr %08x\n", __func__, __LINE__,
1104 pxp->pxp_conf_state.s0_param.width,
1105 pxp->pxp_conf_state.s0_param.height,
1106 pxp->pxp_conf_state.s0_param.paddr);
1107 pr_debug("%s:%d OUT w/h %d/%d paddr %08x\n", __func__, __LINE__,
1108 pxp->pxp_conf_state.out_param.width,
1109 pxp->pxp_conf_state.out_param.height,
1110 pxp->pxp_conf_state.out_param.paddr);
/*
 * pxpdma_dostart_work() - called from the dispatch thread: take the
 * channel at the head of the global task list, snapshot its active
 * transaction and program the hardware.  If nothing is pending, clear
 * pxp_ongoing and return.  Lock order: pxp->lock, then pxp_chan->lock.
 */
1113 static void pxpdma_dostart_work(struct pxps *pxp)
1115 struct pxp_channel *pxp_chan = NULL;
1116 unsigned long flags, flags1;
1118 spin_lock_irqsave(&pxp->lock, flags);
1119 if (list_empty(&head)) {
1120 pxp->pxp_ongoing = 0;
1121 spin_unlock_irqrestore(&pxp->lock, flags);
/* NOTE(review): early 'return' dropped from excerpt here. */
1125 pxp_chan = list_entry(head.next, struct pxp_channel, list);
1127 spin_lock_irqsave(&pxp_chan->lock, flags1);
1128 if (!list_empty(&pxp_chan->active_list)) {
1129 struct pxp_tx_desc *desc;
1131 desc = pxpdma_first_active(pxp_chan);
1132 __pxpdma_dostart(pxp_chan);
1134 spin_unlock_irqrestore(&pxp_chan->lock, flags1);
/* Write the snapshotted configuration to the PxP registers and kick it. */
1137 pxp_config(pxp, pxp_chan);
1141 spin_unlock_irqrestore(&pxp->lock, flags);
/*
 * pxpdma_dequeue() - move every descriptor from the channel's submit
 * queue onto @list (in practice, the active list).  Caller holds
 * pxp_chan->lock.
 */
1144 static void pxpdma_dequeue(struct pxp_channel *pxp_chan, struct list_head *list)
1146 struct pxp_tx_desc *desc = NULL;
/* NOTE(review): the opening 'do {' of this loop is dropped from the excerpt. */
1148 desc = pxpdma_first_queued(pxp_chan);
1149 list_move_tail(&desc->list, list);
1150 } while (!list_empty(&pxp_chan->queue));
/*
 * pxp_tx_submit() - dmaengine .tx_submit hook: assign the next cookie to
 * this transaction and append its descriptor to the channel's submit
 * queue.  Actual hardware start is deferred to pxp_issue_pending().
 * Returns the cookie assigned to @tx.
 */
1153 static dma_cookie_t pxp_tx_submit(struct dma_async_tx_descriptor *tx)
1155 struct pxp_tx_desc *desc = to_tx_desc(tx);
1156 struct pxp_channel *pxp_chan = to_pxp_channel(tx->chan);
1157 dma_cookie_t cookie;
1158 unsigned long flags;
1160 dev_dbg(&pxp_chan->dma_chan.dev->device, "received TX\n");
1162 mutex_lock(&pxp_chan->chan_mutex);
1164 cookie = pxp_chan->dma_chan.cookie;
/* NOTE(review): the cookie increment/wrap lines (orig 1165-1168) are
 * dropped from this excerpt. */
1169 /* from dmaengine.h: "last cookie value returned to client" */
1170 pxp_chan->dma_chan.cookie = cookie;
1171 tx->cookie = cookie;
1173 /* pxp_chan->lock can be taken under ichan->lock, but not v.v. */
1174 spin_lock_irqsave(&pxp_chan->lock, flags);
1176 /* Here we add the tx descriptor to our PxP task queue. */
1177 list_add_tail(&desc->list, &pxp_chan->queue);
1179 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1181 dev_dbg(&pxp_chan->dma_chan.dev->device, "done TX\n");
1183 mutex_unlock(&pxp_chan->chan_mutex);
1187 /* Called with pxp_chan->chan_mutex held */
1188 static int pxp_desc_alloc(struct pxp_channel *pxp_chan, int n)
/*
 * Allocate an array of @n tx descriptors for the channel, initialize
 * each one's dmaengine descriptor and park them all on the free list.
 *
 * NOTE(review): the vmalloc() NULL check and the loop advancing 'desc'
 * across the array (orig ~1191-1210) are not visible in this excerpt -
 * confirm the allocation failure path in the full file.
 */
1190 struct pxp_tx_desc *desc = vmalloc(n * sizeof(struct pxp_tx_desc));
1195 pxp_chan->n_tx_desc = n;
1196 pxp_chan->desc = desc;
1197 INIT_LIST_HEAD(&pxp_chan->active_list);
1198 INIT_LIST_HEAD(&pxp_chan->queue);
1199 INIT_LIST_HEAD(&pxp_chan->free_list);
1202 struct dma_async_tx_descriptor *txd = &desc->txd;
1204 memset(txd, 0, sizeof(*txd));
1205 INIT_LIST_HEAD(&desc->tx_list);
1206 dma_async_tx_descriptor_init(txd, &pxp_chan->dma_chan);
/* Route submissions through our queueing tx_submit implementation. */
1207 txd->tx_submit = pxp_tx_submit;
1209 list_add(&desc->list, &pxp_chan->free_list);
1218 * pxp_init_channel() - initialize a PXP channel.
1219 * @pxp_dma: PXP DMA context.
1220 * @pchan: pointer to the channel object.
1221 * @return 0 on success or negative error code on failure.
1223 static int pxp_init_channel(struct pxp_dma *pxp_dma,
1224 struct pxp_channel *pxp_chan)
1226 unsigned long flags;
1227 struct pxps *pxp = to_pxp(pxp_dma);
1228 int ret = 0, n_desc = 0;
1231 * We are using _virtual_ channel here.
1232 * Each channel contains all parameters of corresponding layers
1233 * for one transaction; each layer is represented as one descriptor
1234 * (i.e., pxp_tx_desc) here.
1237 spin_lock_irqsave(&pxp->lock, flags);
1239 /* max desc nr: S0+OL+OUT = 1+8+1 */
/* NOTE(review): the assignment n_desc = 16 (orig ~1240) is dropped from
 * this excerpt. */
1242 spin_unlock_irqrestore(&pxp->lock, flags);
/* Lazily allocate the descriptor pool on first initialization only. */
1244 if (n_desc && !pxp_chan->desc)
1245 ret = pxp_desc_alloc(pxp_chan, n_desc);
1251 * pxp_uninit_channel() - uninitialize a PXP channel.
1252 * @pxp_dma: PXP DMA context.
1253 * @pchan: pointer to the channel object.
1254 * @return 0 on success or negative error code on failure.
1256 static int pxp_uninit_channel(struct pxp_dma *pxp_dma,
1257 struct pxp_channel *pxp_chan)
/* Release the descriptor pool allocated by pxp_desc_alloc(); NULLing the
 * pointer lets pxp_init_channel() reallocate on the next use. */
1262 vfree(pxp_chan->desc);
1264 pxp_chan->desc = NULL;
/*
 * pxp_irq() - PxP completion interrupt.  Reads the histogram status,
 * acknowledges the IRQ, completes the descriptor at the head of the
 * active channel (invoking the client callback if requested), recycles
 * the descriptor chain onto the free list, signals the dispatch thread
 * via pxp->complete and re-arms the clock-off timer.
 */
1269 static irqreturn_t pxp_irq(int irq, void *dev_id)
1271 struct pxps *pxp = dev_id;
1272 struct pxp_channel *pxp_chan;
1273 struct pxp_tx_desc *desc;
1274 dma_async_tx_callback callback;
1275 void *callback_param;
1276 unsigned long flags;
/* NOTE(review): declaration/assignment of 'hist_status' is split across
 * dropped lines here - confirm in the full file. */
1282 __raw_readl(pxp->base + HW_PXP_HIST_CTRL) & BM_PXP_HIST_CTRL_STATUS;
/* Ack the interrupt before touching the lists. */
1284 __raw_writel(BM_PXP_STAT_IRQ, pxp->base + HW_PXP_STAT_CLR);
1286 spin_lock_irqsave(&pxp->lock, flags);
1288 if (list_empty(&head)) {
1289 pxp->pxp_ongoing = 0;
1290 spin_unlock_irqrestore(&pxp->lock, flags);
1294 pxp_chan = list_entry(head.next, struct pxp_channel, list);
1296 if (list_empty(&pxp_chan->active_list)) {
1297 pr_debug("PXP_IRQ pxp_chan->active_list empty. chan_id %d\n",
1298 pxp_chan->dma_chan.chan_id);
1299 pxp->pxp_ongoing = 0;
1300 spin_unlock_irqrestore(&pxp->lock, flags);
1304 /* Get descriptor and call callback */
1305 desc = pxpdma_first_active(pxp_chan);
1307 pxp_chan->completed = desc->txd.cookie;
1309 callback = desc->txd.callback;
1310 callback_param = desc->txd.callback_param;
1312 /* Send histogram status back to caller */
1313 desc->hist_status = hist_status;
1315 if ((desc->txd.flags & DMA_PREP_INTERRUPT) && callback)
1316 callback(callback_param);
1318 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/* Recycle the parent descriptor and all of its children. */
1320 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1321 list_move(&desc->list, &pxp_chan->free_list);
/* Channel fully drained: drop it from the global task list. */
1323 if (list_empty(&pxp_chan->active_list))
1324 list_del_init(&pxp_chan->list);
1326 complete(&pxp->complete);
1327 pxp->pxp_ongoing = 0;
1328 mod_timer(&pxp->clk_timer, jiffies + msecs_to_jiffies(timeout_in_ms));
1330 spin_unlock_irqrestore(&pxp->lock, flags);
1335 /* called with pxp_chan->lock held */
1336 static struct pxp_tx_desc *pxpdma_desc_get(struct pxp_channel *pxp_chan)
1338 struct pxp_tx_desc *desc, *_desc;
1339 struct pxp_tx_desc *ret = NULL;
/* Pop the first free descriptor, if any; returns NULL when exhausted.
 * NOTE(review): the 'ret = desc; break;' body lines are dropped from
 * this excerpt. */
1341 list_for_each_entry_safe(desc, _desc, &pxp_chan->free_list, list) {
1342 list_del_init(&desc->list);
1350 /* called with pxp_chan->lock held */
1351 static void pxpdma_desc_put(struct pxp_channel *pxp_chan,
1352 struct pxp_tx_desc *desc)
/* Return @desc and all of its tx_list children to the free list.
 * Caller holds pxp_chan->lock.  NOTE(review): the 'if (desc)' guard
 * around this body is dropped from the excerpt. */
1355 struct device *dev = &pxp_chan->dma_chan.dev->device;
1356 struct pxp_tx_desc *child;
1358 list_for_each_entry(child, &desc->tx_list, list)
1359 dev_info(dev, "moving child desc %p to freelist\n", child);
1360 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1361 dev_info(dev, "moving desc %p to freelist\n", desc);
1362 list_add(&desc->list, &pxp_chan->free_list);
1366 /* Allocate and initialise a transfer descriptor. */
1366 /* Allocate and initialise a transfer descriptor. */
/*
 * pxp_prep_slave_sg() - dmaengine .device_prep_slave_sg hook.  Builds a
 * descriptor chain from the scatterlist: entry 0 is the S0 (video)
 * layer and becomes the parent, entry 1 is the output buffer, any
 * further entries are overlay layers; children are linked on the
 * parent's tx_list.  Requires at least two entries (S0 + output).
 * Returns the parent's dma_async_tx_descriptor or NULL on error.
 */
1367 static struct dma_async_tx_descriptor *pxp_prep_slave_sg(struct dma_chan *chan,
1370 unsigned int sg_len,
1372 dma_transfer_direction
1374 unsigned long tx_flags,
1377 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1378 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1379 struct pxps *pxp = to_pxp(pxp_dma);
1380 struct pxp_tx_desc *desc = NULL;
1381 struct pxp_tx_desc *first = NULL, *prev = NULL;
1382 struct scatterlist *sg;
1383 unsigned long flags;
1384 dma_addr_t phys_addr;
1387 if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV) {
1388 dev_err(chan->device->dev, "Invalid DMA direction %d!\n",
/* NOTE(review): 'return NULL' for both error paths is dropped from the
 * excerpt. */
1393 if (unlikely(sg_len < 2))
1396 spin_lock_irqsave(&pxp_chan->lock, flags);
1397 for_each_sg(sgl, sg, sg_len, i) {
1398 desc = pxpdma_desc_get(pxp_chan);
/* Out of descriptors: give back everything taken so far and bail. */
1400 pxpdma_desc_put(pxp_chan, first);
1401 dev_err(chan->device->dev, "Can't get DMA desc.\n");
1402 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1406 phys_addr = sg_dma_address(sg);
1411 desc->layer_param.s0_param.paddr = phys_addr;
1413 list_add_tail(&desc->list, &first->tx_list);
1418 desc->layer_param.out_param.paddr = phys_addr;
1420 desc->layer_param.ol_param.paddr = phys_addr;
1425 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1427 pxp->pxp_conf_state.layer_nr = sg_len;
1428 first->txd.flags = tx_flags;
1429 first->len = sg_len;
1430 pr_debug("%s:%d first %p, first->len %d, flags %08x\n",
1431 __func__, __LINE__, first, first->len, first->txd.flags);
/*
 * pxp_issue_pending() - dmaengine .device_issue_pending hook: move the
 * channel's queued descriptors onto its active list, add the channel to
 * the global task list (guarding against double insertion, which would
 * corrupt the list), enable the clock and wake the dispatch thread.
 */
1436 static void pxp_issue_pending(struct dma_chan *chan)
1438 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1439 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1440 struct pxps *pxp = to_pxp(pxp_dma);
1441 unsigned long flags0, flags;
1442 struct list_head *iter;
1444 spin_lock_irqsave(&pxp->lock, flags0);
1445 spin_lock_irqsave(&pxp_chan->lock, flags);
1447 if (!list_empty(&pxp_chan->queue)) {
1448 pxpdma_dequeue(pxp_chan, &pxp_chan->active_list);
1449 pxp_chan->status = PXP_CHANNEL_READY;
1451 /* Avoid adding a pxp channel to head list which
1452 * has been already listed in it. And this may
1453 * cause the head list to be broken down.
1455 if (list_empty(&head)) {
1456 list_add_tail(&pxp_chan->list, &head);
/* Walk the non-empty list to check the channel is not already present.
 * NOTE(review): initialization of 'iter' and the loop exit lines are
 * dropped from the excerpt. */
1458 while (iter != &head) {
1459 if (&pxp_chan->list == iter)
1464 list_add_tail(&pxp_chan->list, &head);
1467 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1468 spin_unlock_irqrestore(&pxp->lock, flags0);
/* NOTE(review): the excerpt also drops the empty-queue early return that
 * these duplicate unlocks belong to. */
1471 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1472 spin_unlock_irqrestore(&pxp->lock, flags0);
1474 pxp_clk_enable(pxp);
1475 wake_up_interruptible(&pxp->thread_waitq);
/*
 * __pxp_terminate_all() - abort everything on the channel: splice both
 * the submit queue and the active list back onto the free list and
 * reset the channel state.  Callers hold chan_mutex.
 */
1478 static void __pxp_terminate_all(struct dma_chan *chan)
1480 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1481 unsigned long flags;
1483 /* pchan->queue is modified in ISR, have to spinlock */
1484 spin_lock_irqsave(&pxp_chan->lock, flags);
1485 list_splice_init(&pxp_chan->queue, &pxp_chan->free_list);
1486 list_splice_init(&pxp_chan->active_list, &pxp_chan->free_list);
1488 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1490 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/*
 * pxp_control() - dmaengine .device_control hook.  Only
 * DMA_TERMINATE_ALL is implemented; anything else is rejected
 * (NOTE(review): the '-ENOSYS' return line is dropped from the excerpt).
 */
1493 static int pxp_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
1496 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1498 /* Only supports DMA_TERMINATE_ALL */
1499 if (cmd != DMA_TERMINATE_ALL)
1502 mutex_lock(&pxp_chan->chan_mutex);
1503 __pxp_terminate_all(chan);
1504 mutex_unlock(&pxp_chan->chan_mutex);
/*
 * pxp_alloc_chan_resources() - dmaengine .device_alloc_chan_resources
 * hook: allocate the channel's descriptor pool via pxp_init_channel()
 * and mark the channel initialized.
 */
1509 static int pxp_alloc_chan_resources(struct dma_chan *chan)
1511 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1512 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1515 /* dmaengine.c now guarantees to only offer free channels */
1516 BUG_ON(chan->client_count > 1);
1517 WARN_ON(pxp_chan->status != PXP_CHANNEL_FREE);
/* No cookie has completed yet on a fresh channel. */
1520 pxp_chan->completed = -ENXIO;
1522 pr_debug("%s dma_chan.chan_id %d\n", __func__, chan->chan_id);
1523 ret = pxp_init_channel(pxp_dma, pxp_chan);
1527 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1529 dev_dbg(&chan->dev->device, "Found channel 0x%x, irq %d\n",
1530 chan->chan_id, pxp_chan->eof_irq);
/*
 * pxp_free_chan_resources() - dmaengine .device_free_chan_resources
 * hook: abort outstanding work, mark the channel free and release its
 * descriptor pool.
 */
1538 static void pxp_free_chan_resources(struct dma_chan *chan)
1540 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1541 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1543 mutex_lock(&pxp_chan->chan_mutex);
1545 __pxp_terminate_all(chan);
1547 pxp_chan->status = PXP_CHANNEL_FREE;
1549 pxp_uninit_channel(pxp_dma, pxp_chan);
1551 mutex_unlock(&pxp_chan->chan_mutex);
1554 static enum dma_status pxp_tx_status(struct dma_chan *chan,
1555 dma_cookie_t cookie,
1556 struct dma_tx_state *txstate)
1558 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1560 if (cookie != chan->cookie)
1564 txstate->last = pxp_chan->completed;
1565 txstate->used = chan->cookie;
1566 txstate->residue = 0;
/*
 * pxp_hw_init() - one-time hardware bring-up: release the block from
 * reset, load benign software defaults into pxp_conf_state, program
 * those defaults into the registers, and set up the fixed histogram
 * comparison tables (2/4/8/16-level panel modes).
 */
1571 static int pxp_hw_init(struct pxps *pxp)
1573 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
1574 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
1577 /* Pull PxP out of reset */
1578 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1580 /* Config defaults */
1582 /* Initialize non-channel-specific PxP parameters */
1583 proc_data->drect.left = proc_data->srect.left = 0;
1584 proc_data->drect.top = proc_data->srect.top = 0;
1585 proc_data->drect.width = proc_data->srect.width = 0;
1586 proc_data->drect.height = proc_data->srect.height = 0;
1587 proc_data->scaling = 0;
1588 proc_data->hflip = 0;
1589 proc_data->vflip = 0;
1590 proc_data->rotate = 0;
1591 proc_data->bgcolor = 0;
1593 /* Initialize S0 channel parameters */
1594 pxp_conf->s0_param.pixel_fmt = pxp_s0_formats[0];
1595 pxp_conf->s0_param.width = 0;
1596 pxp_conf->s0_param.height = 0;
1597 pxp_conf->s0_param.color_key = -1;
1598 pxp_conf->s0_param.color_key_enable = false;
1600 /* Initialize OL channel parameters */
1601 pxp_conf->ol_param[0].combine_enable = false;
1602 pxp_conf->ol_param[0].width = 0;
1603 pxp_conf->ol_param[0].height = 0;
1604 pxp_conf->ol_param[0].pixel_fmt = PXP_PIX_FMT_RGB565;
1605 pxp_conf->ol_param[0].color_key_enable = false;
1606 pxp_conf->ol_param[0].color_key = -1;
1607 pxp_conf->ol_param[0].global_alpha_enable = false;
1608 pxp_conf->ol_param[0].global_alpha = 0;
1609 pxp_conf->ol_param[0].local_alpha_enable = false;
1611 /* Initialize Output channel parameters */
1612 pxp_conf->out_param.width = 0;
1613 pxp_conf->out_param.height = 0;
1614 pxp_conf->out_param.pixel_fmt = PXP_PIX_FMT_RGB565;
1616 proc_data->overlay_state = 0;
1618 /* Write default h/w config */
1620 pxp_set_s0param(pxp);
1621 pxp_set_s0crop(pxp);
1623 * simply program the ULC to a higher value than the LRC
1624 * to avoid any AS pixels to show up in the output buffer.
1626 __raw_writel(0xFFFFFFFF, pxp->base + HW_PXP_OUT_AS_ULC);
1627 pxp_set_olparam(0, pxp);
1628 pxp_set_olcolorkey(0, pxp);
1630 pxp_set_s0colorkey(pxp);
1635 /* One-time histogram configuration */
/* NOTE(review): declaration/assignment of 'reg_val' preceding this line
 * is dropped from the excerpt. */
1637 BF_PXP_HIST_CTRL_PANEL_MODE(BV_PXP_HIST_CTRL_PANEL_MODE__GRAY16);
1638 __raw_writel(reg_val, pxp->base + HW_PXP_HIST_CTRL);
1640 reg_val = BF_PXP_HIST2_PARAM_VALUE0(0x00) |
1641 BF_PXP_HIST2_PARAM_VALUE1(0x00F);
1642 __raw_writel(reg_val, pxp->base + HW_PXP_HIST2_PARAM);
1644 reg_val = BF_PXP_HIST4_PARAM_VALUE0(0x00) |
1645 BF_PXP_HIST4_PARAM_VALUE1(0x05) |
1646 BF_PXP_HIST4_PARAM_VALUE2(0x0A) | BF_PXP_HIST4_PARAM_VALUE3(0x0F);
1647 __raw_writel(reg_val, pxp->base + HW_PXP_HIST4_PARAM);
1649 reg_val = BF_PXP_HIST8_PARAM0_VALUE0(0x00) |
1650 BF_PXP_HIST8_PARAM0_VALUE1(0x02) |
1651 BF_PXP_HIST8_PARAM0_VALUE2(0x04) | BF_PXP_HIST8_PARAM0_VALUE3(0x06);
1652 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM0);
1653 reg_val = BF_PXP_HIST8_PARAM1_VALUE4(0x09) |
1654 BF_PXP_HIST8_PARAM1_VALUE5(0x0B) |
1655 BF_PXP_HIST8_PARAM1_VALUE6(0x0D) | BF_PXP_HIST8_PARAM1_VALUE7(0x0F);
1656 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM1);
1658 reg_val = BF_PXP_HIST16_PARAM0_VALUE0(0x00) |
1659 BF_PXP_HIST16_PARAM0_VALUE1(0x01) |
1660 BF_PXP_HIST16_PARAM0_VALUE2(0x02) |
1661 BF_PXP_HIST16_PARAM0_VALUE3(0x03);
1662 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM0);
1663 reg_val = BF_PXP_HIST16_PARAM1_VALUE4(0x04) |
1664 BF_PXP_HIST16_PARAM1_VALUE5(0x05) |
1665 BF_PXP_HIST16_PARAM1_VALUE6(0x06) |
1666 BF_PXP_HIST16_PARAM1_VALUE7(0x07);
1667 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM1);
1668 reg_val = BF_PXP_HIST16_PARAM2_VALUE8(0x08) |
1669 BF_PXP_HIST16_PARAM2_VALUE9(0x09) |
1670 BF_PXP_HIST16_PARAM2_VALUE10(0x0A) |
1671 BF_PXP_HIST16_PARAM2_VALUE11(0x0B);
1672 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM2);
1673 reg_val = BF_PXP_HIST16_PARAM3_VALUE12(0x0C) |
1674 BF_PXP_HIST16_PARAM3_VALUE13(0x0D) |
1675 BF_PXP_HIST16_PARAM3_VALUE14(0x0E) |
1676 BF_PXP_HIST16_PARAM3_VALUE15(0x0F);
1677 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM3);
/*
 * pxp_dma_init() - fill in the dma_device callbacks, initialize all
 * NR_PXP_VIRT_CHANNEL virtual channels (one shared EOF IRQ) and
 * register the device with the dmaengine core.  Returns the result of
 * dma_async_device_register().
 */
1682 static int pxp_dma_init(struct pxps *pxp)
1684 struct pxp_dma *pxp_dma = &pxp->pxp_dma;
1685 struct dma_device *dma = &pxp_dma->dma;
1688 dma_cap_set(DMA_SLAVE, dma->cap_mask);
1689 dma_cap_set(DMA_PRIVATE, dma->cap_mask);
1691 /* Compulsory common fields */
1692 dma->dev = pxp->dev;
1693 dma->device_alloc_chan_resources = pxp_alloc_chan_resources;
1694 dma->device_free_chan_resources = pxp_free_chan_resources;
1695 dma->device_tx_status = pxp_tx_status;
1696 dma->device_issue_pending = pxp_issue_pending;
1698 /* Compulsory for DMA_SLAVE fields */
1699 dma->device_prep_slave_sg = pxp_prep_slave_sg;
1700 dma->device_control = pxp_control;
1702 /* Initialize PxP Channels */
1703 INIT_LIST_HEAD(&dma->channels);
1704 for (i = 0; i < NR_PXP_VIRT_CHANNEL; i++) {
1705 struct pxp_channel *pxp_chan = pxp->channel + i;
1706 struct dma_chan *dma_chan = &pxp_chan->dma_chan;
1708 spin_lock_init(&pxp_chan->lock);
1709 mutex_init(&pxp_chan->chan_mutex);
1711 /* Only one EOF IRQ for PxP, shared by all channels */
1712 pxp_chan->eof_irq = pxp->irq;
1713 pxp_chan->status = PXP_CHANNEL_FREE;
1714 pxp_chan->completed = -ENXIO;
1715 snprintf(pxp_chan->eof_name, sizeof(pxp_chan->eof_name),
1718 dma_chan->device = &pxp_dma->dma;
/* Non-zero initial cookie: 0/negative are reserved for errors. */
1719 dma_chan->cookie = 1;
1720 dma_chan->chan_id = i;
1721 list_add_tail(&dma_chan->device_node, &dma->channels);
1724 return dma_async_device_register(&pxp_dma->dma);
/* sysfs read: current clock auto-off timeout in milliseconds. */
1727 static ssize_t clk_off_timeout_show(struct device *dev,
1728 struct device_attribute *attr, char *buf)
1730 return sprintf(buf, "%d\n", timeout_in_ms);
1733 static ssize_t clk_off_timeout_store(struct device *dev,
1734 struct device_attribute *attr,
1735 const char *buf, size_t count)
1738 if (sscanf(buf, "%d", &val) > 0) {
1739 timeout_in_ms = val;
1745 static DEVICE_ATTR(clk_off_timeout, 0644, clk_off_timeout_show,
1746 clk_off_timeout_store);
/* sysfs read: current E-ink block size setting. */
1748 static ssize_t block_size_show(struct device *dev,
1749 struct device_attribute *attr,
1752 return sprintf(buf, "%d\n", block_size);
1755 static ssize_t block_size_store(struct device *dev,
1756 struct device_attribute *attr,
1757 const char *buf, size_t count)
1761 block_size = simple_strtoul(buf, last, 0);
1767 static DEVICE_ATTR(block_size, S_IWUSR | S_IRUGO,
1768 block_size_show, block_size_store);
1770 static const struct of_device_id imx_pxpdma_dt_ids[] = {
1771 { .compatible = "fsl,imx6dl-pxp-dma", },
1774 MODULE_DEVICE_TABLE(of, imx_pxpdma_dt_ids);
/*
 * has_pending_task() - wait-condition for the dispatch thread: true when
 * any channel is on the global task list.  @task is currently unused.
 */
1776 static int has_pending_task(struct pxps *pxp, struct pxp_channel *task)
1779 unsigned long flags;
1781 spin_lock_irqsave(&pxp->lock, flags);
1782 found = !list_empty(&head);
1783 spin_unlock_irqrestore(&pxp->lock, flags);
/*
 * pxp_dispatch_thread() - kernel thread that serializes PxP tasks:
 * sleeps until work is queued, marks the engine busy, programs and
 * starts the task via pxpdma_dostart_work(), then waits (with a 2 s
 * timeout) for the completion signalled by pxp_irq().
 */
1788 static int pxp_dispatch_thread(void *argv)
1790 struct pxps *pxp = (struct pxps *)argv;
1791 struct pxp_channel *pending = NULL;
1792 unsigned long flags;
1794 while (!kthread_should_stop()) {
1796 ret = wait_event_interruptible(pxp->thread_waitq,
1797 has_pending_task(pxp, pending));
/* Interrupted by a signal: retry the wait rather than dispatch. */
1798 if (signal_pending(current))
1801 spin_lock_irqsave(&pxp->lock, flags);
1802 pxp->pxp_ongoing = 1;
1803 spin_unlock_irqrestore(&pxp->lock, flags);
1804 init_completion(&pxp->complete);
1805 pxpdma_dostart_work(pxp);
1806 ret = wait_for_completion_timeout(&pxp->complete, 2 * HZ);
/* ret == 0 means the IRQ never completed us within 2 s. */
1808 printk(KERN_EMERG "%s: task is timeout\n\n", __func__);
/*
 * pxp_probe() - platform driver probe: map registers, get clock and
 * IRQ, reset/initialize the hardware, register the dmaengine device,
 * create sysfs attributes, start the clock-off timer machinery and the
 * dispatch thread, then register the user-space device node.
 */
1816 static int pxp_probe(struct platform_device *pdev)
1819 struct resource *res;
1823 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1824 irq = platform_get_irq(pdev, 0);
1825 if (!res || irq < 0) {
1830 pxp = devm_kzalloc(&pdev->dev, sizeof(*pxp), GFP_KERNEL);
1832 dev_err(&pdev->dev, "failed to allocate control object\n");
1837 pxp->dev = &pdev->dev;
1839 platform_set_drvdata(pdev, pxp);
/* NOTE(review): the 'pxp->irq = irq;' assignment is not visible in this
 * excerpt, yet pxp->irq is used below - confirm it exists in the full
 * file. */
1842 pxp->pxp_ongoing = 0;
1845 spin_lock_init(&pxp->lock);
1846 mutex_init(&pxp->clk_mutex);
1848 pxp->base = devm_request_and_ioremap(&pdev->dev, res);
1849 if (pxp->base == NULL) {
1850 dev_err(&pdev->dev, "Couldn't ioremap regs\n");
1857 pxp->clk = devm_clk_get(&pdev->dev, "pxp-axi");
/* Clock must run for the register writes in pxp_hw_init(). */
1858 clk_prepare_enable(pxp->clk);
1860 err = pxp_hw_init(pxp);
1861 clk_disable_unprepare(pxp->clk);
1863 dev_err(&pdev->dev, "failed to initialize hardware\n");
1867 err = devm_request_irq(&pdev->dev, pxp->irq, pxp_irq, 0,
1868 "pxp-dmaengine", pxp);
1871 /* Initialize DMA engine */
1872 err = pxp_dma_init(pxp);
1876 if (device_create_file(&pdev->dev, &dev_attr_clk_off_timeout)) {
1878 "Unable to create file from clk_off_timeout\n");
1882 device_create_file(&pdev->dev, &dev_attr_block_size);
1885 INIT_WORK(&pxp->work, clkoff_callback);
1886 init_timer(&pxp->clk_timer);
1887 pxp->clk_timer.function = pxp_clkoff_timer;
1888 pxp->clk_timer.data = (unsigned long)pxp;
1890 /* allocate a kernel thread to dispatch pxp conf */
1891 pxp->dispatch = kthread_run(pxp_dispatch_thread, pxp, "pxp_dispatch");
1892 if (IS_ERR(pxp->dispatch)) {
1893 err = PTR_ERR(pxp->dispatch);
/* NOTE(review): thread_waitq is initialized after kthread_run() here;
 * the thread may touch it first - worth confirming ordering upstream. */
1896 init_waitqueue_head(&pxp->thread_waitq);
1898 register_pxp_device();
1902 dev_err(&pdev->dev, "Exiting (unsuccessfully) pxp_probe()\n");
/*
 * pxp_remove() - tear down in reverse probe order: user device, dispatch
 * thread, deferred clock-off work/timer, clock, sysfs attributes and
 * finally the dmaengine registration.  devm resources are auto-freed.
 */
1906 static int pxp_remove(struct platform_device *pdev)
1908 struct pxps *pxp = platform_get_drvdata(pdev);
1910 unregister_pxp_device();
1911 kthread_stop(pxp->dispatch);
1912 cancel_work_sync(&pxp->work);
1913 del_timer_sync(&pxp->clk_timer);
1914 clk_disable_unprepare(pxp->clk);
1915 device_remove_file(&pdev->dev, &dev_attr_clk_off_timeout);
1916 device_remove_file(&pdev->dev, &dev_attr_block_size);
1917 dma_async_device_unregister(&(pxp->pxp_dma.dma));
/*
 * pxp_suspend() - busy-wait for any in-flight operation to finish, put
 * the block into soft reset, then gate the clock.
 */
1923 static int pxp_suspend(struct platform_device *pdev, pm_message_t state)
1925 struct pxps *pxp = platform_get_drvdata(pdev);
1927 pxp_clk_enable(pxp);
1928 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
1931 __raw_writel(BM_PXP_CTRL_SFTRST, pxp->base + HW_PXP_CTRL);
1932 pxp_clk_disable(pxp);
/*
 * pxp_resume() - release the soft reset applied at suspend so the block
 * is usable again, then gate the clock until real work arrives.
 */
1937 static int pxp_resume(struct platform_device *pdev)
1939 struct pxps *pxp = platform_get_drvdata(pdev);
1941 pxp_clk_enable(pxp);
1942 /* Pull PxP out of reset */
1943 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1944 pxp_clk_disable(pxp);
1949 #define pxp_suspend NULL
1950 #define pxp_resume NULL
/* Platform driver glue; pxp_suspend/pxp_resume are #defined to NULL
 * when CONFIG_PM is not set (see the #define block above). */
1953 static struct platform_driver pxp_driver = {
1956 .of_match_table = of_match_ptr(imx_pxpdma_dt_ids),
1959 .remove = pxp_remove,
1960 .suspend = pxp_suspend,
1961 .resume = pxp_resume,
1967 MODULE_DESCRIPTION("i.MX PxP driver");
1968 MODULE_AUTHOR("Freescale Semiconductor, Inc.");
1969 MODULE_LICENSE("GPL");