/*
 * Copyright 2005-2011 Freescale Semiconductor, Inc. All Rights Reserved.
 *
 * The code contained herein is licensed under the GNU General Public
 * License. You may obtain a copy of the GNU General Public License
 * Version 2 or later at the following locations:
 *
 * http://www.opensource.org/licenses/gpl-license.html
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @brief This file contains the IPU driver common API functions.
 */
21 #include <linux/types.h>
22 #include <linux/init.h>
23 #include <linux/platform_device.h>
24 #include <linux/err.h>
25 #include <linux/spinlock.h>
26 #include <linux/delay.h>
27 #include <linux/interrupt.h>
29 #include <linux/ipu.h>
30 #include <linux/clk.h>
31 #include <linux/clkdev.h>
32 #include <mach/clock.h>
33 #include <mach/hardware.h>
34 #include <mach/ipu-v3.h>
35 #include <mach/devices-common.h>
39 #include "ipu_param_mem.h"
42 irqreturn_t(*handler) (int, void *); /*!< the ISR */
43 const char *name; /*!< device associated with the interrupt */
44 void *dev_id; /*!< some unique information for the ISR */
45 __u32 flags; /*!< not used */
/* IPU core clock, the two DI clocks, pixel clocks and CSI clocks */
struct clk *g_ipu_clk;
bool g_ipu_clk_enabled;
struct clk *g_di_clk[2];
struct clk *g_pixel_clk[2];
struct clk *g_csi_clk[2];
/* Which DI (0/1) each DC channel was initialized with (see _ipu_dc_init calls) */
unsigned char g_dc_di_assignment[10];
/* Logical channel currently owning each CSI, or CHAN_NONE */
ipu_channel_t g_ipu_csi_channel[2];
/* Per logical channel: secondary (graphics) / third (alpha) input enabled */
bool g_sec_chan_en[24];
bool g_thrd_chan_en[24];
/* Per DMA channel: output uses interlaced scan */
bool g_chan_is_interlaced[52];
/* Bit per logical channel: initialized / enabled */
uint32_t g_channel_init_mask;
uint32_t g_channel_enable_mask;
DEFINE_SPINLOCK(ipu_lock);
struct device *g_ipu_dev;

static struct ipu_irq_node ipu_irq_list[IPU_IRQ_COUNT];

/* Use counts per sub-module; gate the IPU_CONF enable bits on teardown */
static int ipu_dc_use_count;
static int ipu_dp_use_count;
static int ipu_dmfc_use_count;
static int ipu_smfc_use_count;
static int ipu_ic_use_count;
static int ipu_rot_use_count;
static int ipu_vdi_use_count;
static int ipu_di_use_count[2];
static int ipu_csi_use_count[2];
/* Set to the follow using IC direct channel, default non */
static ipu_channel_t using_ic_dirct_ch;

/* for power gating: register shadows saved across power-down */
static uint32_t ipu_conf_reg;
static uint32_t ic_conf_reg;
static uint32_t ipu_cha_db_mode_reg[4];
static uint32_t ipu_cha_trb_mode_reg[2];
static uint32_t ipu_cha_cur_buf_reg[4];
static uint32_t ipu_cha_triple_cur_buf_reg[4];
static uint32_t idma_sub_addr_reg[5];
static uint32_t idma_enable_reg[2];
static uint32_t buf_ready_reg[10];

u32 *ipu_disp_base[2];

/* Static functions */
static irqreturn_t ipu_irq_handler(int irq, void *desc);
109 static inline uint32_t channel_2_dma(ipu_channel_t ch, ipu_buffer_t type)
111 return ((uint32_t) ch >> (6 * type)) & 0x3F;
/* IC task DMA channels are 11..22, excluding 17 and 18. */
static inline int _ipu_is_ic_chan(uint32_t dma_chan)
{
	if (dma_chan < 11 || dma_chan > 22)
		return 0;
	return dma_chan != 17 && dma_chan != 18;
}
/* IC graphics-combine input channels are DMA 14 and 15. */
static inline int _ipu_is_ic_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 14:
	case 15:
		return 1;
	default:
		return 0;
	}
}
/* Either DP BG (DMA 23) or DP FG (DMA 27) can be the graphic window. */
static inline int _ipu_is_dp_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 23:
	case 27:
		return 1;
	default:
		return 0;
	}
}
/* IRT (rotator) DMA channels are 45..50. */
static inline int _ipu_is_irt_chan(uint32_t dma_chan)
{
	return 45 <= dma_chan && dma_chan <= 50;
}
/* DMFC (display FIFO controller) DMA channels are 23..29. */
static inline int _ipu_is_dmfc_chan(uint32_t dma_chan)
{
	return 23 <= dma_chan && dma_chan <= 29;
}
/*
 * SMFC (CSI->memory) DMA channels are 0..3.
 *
 * Note: dma_chan is unsigned, so the original 'dma_chan >= 0' term was
 * always true (tautological unsigned comparison) and has been dropped.
 */
static inline int _ipu_is_smfc_chan(uint32_t dma_chan)
{
	return dma_chan <= 3;
}
145 static inline int _ipu_is_trb_chan(uint32_t dma_chan)
147 return (((dma_chan == 8) || (dma_chan == 9) ||
148 (dma_chan == 10) || (dma_chan == 13) ||
149 (dma_chan == 21) || (dma_chan == 23) ||
150 (dma_chan == 27) || (dma_chan == 28)) &&
151 (g_ipu_hw_rev >= 2));
/*
 * IDMAC channel helpers.  A channel selects one bit of a pair of 32-bit
 * register banks; 'reg' in idma_is_set must be a register-accessor macro
 * (e.g. IDMAC_CHA_EN) mapping a channel to its register address.
 *
 * All macro parameters are parenthesized to avoid precedence surprises
 * when callers pass expressions (CERT PRE01-C).
 */
#define idma_is_valid(ch)	((ch) != NO_DMA)
/* Bit mask of channel 'ch' within its 32-bit bank (0 for NO_DMA) */
#define idma_mask(ch)		(idma_is_valid(ch) ? (1UL << ((ch) & 0x1F)) : 0)
#define idma_is_set(reg, dma)	(__raw_readl(reg(dma)) & idma_mask(dma))
/* Triple-buffer current-buffer field: 2 bits per channel */
#define tri_cur_buf_mask(ch)	(idma_mask((ch) * 2) * 3)
#define tri_cur_buf_shift(ch)	(ffs(idma_mask((ch) * 2)) - 1)
160 static unsigned long _ipu_pixel_clk_get_rate(struct clk *clk)
162 u32 div = __raw_readl(DI_BS_CLKGEN0(clk->id));
165 return (clk_get_rate(clk->parent) * 16) / div;
/*
 * Round a requested pixel clock rate to what the DI base clock
 * generator can actually produce.
 *
 * NOTE(review): the declarations of div/div1 and parts of the
 * divider-adjustment branches are missing from this extract; the
 * visible statements are preserved unchanged.
 * NOTE(review): 'div = parent_rate / rate' divides by the caller's
 * rate — rate == 0 would fault; confirm callers never pass 0.
 */
static unsigned long _ipu_pixel_clk_round_rate(struct clk *clk, unsigned long rate)
	u32 parent_rate = clk_get_rate(clk->parent) * 16;
	/*
	 * Fractional part is 4 bits,
	 * so simply multiply by 2^4 to get fractional part.
	 */
	div = parent_rate / rate;
	if (div < 0x10)	/* Min DI disp clock divider is 1 */
	if ((parent_rate / div1 - parent_rate / div) < rate / 4)
	return parent_rate / div;
193 static int _ipu_pixel_clk_set_rate(struct clk *clk, unsigned long rate)
195 u32 div = (clk_get_rate(clk->parent) * 16) / rate;
197 __raw_writel(div, DI_BS_CLKGEN0(clk->id));
199 /* Setup pixel clock timing */
200 /* FIXME: needs to be more flexible */
201 /* Down time is half of period */
202 __raw_writel((div / 16) << 16, DI_BS_CLKGEN1(clk->id));
207 static int _ipu_pixel_clk_enable(struct clk *clk)
209 u32 disp_gen = __raw_readl(IPU_DISP_GEN);
210 disp_gen |= clk->id ? DI1_COUNTER_RELEASE : DI0_COUNTER_RELEASE;
211 __raw_writel(disp_gen, IPU_DISP_GEN);
216 static void _ipu_pixel_clk_disable(struct clk *clk)
218 u32 disp_gen = __raw_readl(IPU_DISP_GEN);
219 disp_gen &= clk->id ? ~DI1_COUNTER_RELEASE : ~DI0_COUNTER_RELEASE;
220 __raw_writel(disp_gen, IPU_DISP_GEN);
223 static int _ipu_pixel_clk_set_parent(struct clk *clk, struct clk *parent)
225 u32 di_gen = __raw_readl(DI_GENERAL(clk->id));
227 if (parent == g_ipu_clk)
228 di_gen &= ~DI_GEN_DI_CLK_EXT;
229 else if (!IS_ERR(g_di_clk[clk->id]) && parent == g_di_clk[clk->id])
230 di_gen |= DI_GEN_DI_CLK_EXT;
234 __raw_writel(di_gen, DI_GENERAL(clk->id));
/*
 * Optionally stamp a debug name into a struct clk initializer.
 * Fix: the two alternative definitions must be separated by #else and
 * the conditional closed with #endif (missing in the extract).
 */
#ifdef CONFIG_CLK_DEBUG
#define __INIT_CLK_DEBUG(n)	.name = #n,
#else
#define __INIT_CLK_DEBUG(n)
#endif
243 static struct clk pixel_clk[] = {
245 __INIT_CLK_DEBUG(pixel_clk_0)
247 .get_rate = _ipu_pixel_clk_get_rate,
248 .set_rate = _ipu_pixel_clk_set_rate,
249 .round_rate = _ipu_pixel_clk_round_rate,
250 .set_parent = _ipu_pixel_clk_set_parent,
251 .enable = _ipu_pixel_clk_enable,
252 .disable = _ipu_pixel_clk_disable,
255 __INIT_CLK_DEBUG(pixel_clk_1)
257 .get_rate = _ipu_pixel_clk_get_rate,
258 .set_rate = _ipu_pixel_clk_set_rate,
259 .round_rate = _ipu_pixel_clk_round_rate,
260 .set_parent = _ipu_pixel_clk_set_parent,
261 .enable = _ipu_pixel_clk_enable,
262 .disable = _ipu_pixel_clk_disable,
266 #define _REGISTER_CLOCK(d, n, c) \
273 static struct clk_lookup ipu_lookups[] = {
274 _REGISTER_CLOCK(NULL, "pixel_clk_0", pixel_clk[0]),
275 _REGISTER_CLOCK(NULL, "pixel_clk_1", pixel_clk[1]),
/* Primary display interface, selectable via "di0_primary"/"di1_primary" */
int __initdata primary_di = { 0 };
280 static int __init di1_setup(char *__unused)
282 primary_di = MXC_PRI_DI1;
285 __setup("di1_primary", di1_setup);
287 static int __init di0_setup(char *__unused)
289 primary_di = MXC_PRI_DI0;
292 __setup("di0_primary", di0_setup);
294 struct platform_device *__init imx_add_ipuv3_fb(
295 const struct ipuv3_fb_platform_data *pdata, int id)
298 if (pdata->res_size > 0) {
299 struct resource res[] = {
301 .start = pdata->res_base,
302 .end = pdata->res_base + pdata->res_size - 1,
303 .flags = IORESOURCE_MEM,
307 return imx_add_platform_device_dmamask("mxc_sdc_fb",
308 id, res, ARRAY_SIZE(res), pdata,
309 sizeof(*pdata), DMA_BIT_MASK(32));
311 return imx_add_platform_device_dmamask("mxc_sdc_fb", id,
312 NULL, 0, pdata, sizeof(*pdata),
315 return imx_add_platform_device_dmamask("mxc_sdc_fb", id,
316 NULL, 0, NULL, 0, DMA_BIT_MASK(32));
319 static int __init register_fb_device(struct platform_device *pdev)
321 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
324 primary_di = plat_data->primary_di;
326 if (primary_di == MXC_PRI_DI1) {
327 dev_info(g_ipu_dev, "DI1 is primary\n");
328 /* DI1 -> DP-BG channel: */
329 imx_add_ipuv3_fb(plat_data->fb_head1_platform_data, 1);
330 /* DI0 -> DC channel: */
331 plat_data->fb_head0_platform_data->res_base = 0;
332 plat_data->fb_head0_platform_data->res_size = 0;
333 imx_add_ipuv3_fb(plat_data->fb_head0_platform_data, 0);
335 dev_info(g_ipu_dev, "DI0 is primary\n");
336 /* DI0 -> DP-BG channel: */
337 imx_add_ipuv3_fb(plat_data->fb_head0_platform_data, 0);
338 /* DI1 -> DC channel: */
339 plat_data->fb_head1_platform_data->res_base = 0;
340 plat_data->fb_head1_platform_data->res_size = 0;
341 imx_add_ipuv3_fb(plat_data->fb_head1_platform_data, 1);
345 * DI0/1 DP-FG channel:
347 imx_add_ipuv3_fb(NULL, 2);
/*!
 * This function is called by the driver framework to initialize the IPU
 * hardware.
 *
 * @param	dev	The device structure for the IPU passed in by the
 *			driver framework.
 *
 * @return	Returns 0 on success or negative error code on error
 */
/*
 * Probe the IPU: request the SYNC/ERR interrupts, map each sub-module
 * register bank, register the pixel clocks with clkdev, reset the IPU
 * internal memories, set default interrupt/priority configuration and
 * register the framebuffer and IPU child devices.
 *
 * NOTE(review): several lines (error returns, closing braces, the
 * second request_irq opening) are missing from this extract; the
 * visible statements are preserved unchanged.
 */
static int ipu_probe(struct platform_device *pdev)
	struct resource *res;
	struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
	unsigned long ipu_base;

	spin_lock_init(&ipu_lock);

	g_ipu_hw_rev = plat_data->rev;

	g_ipu_dev = &pdev->dev;

	/* Register IPU interrupts */
	g_ipu_irq[0] = platform_get_irq(pdev, 0);
	if (g_ipu_irq[0] < 0)

	if (request_irq(g_ipu_irq[0], ipu_irq_handler, 0, pdev->name, 0) != 0) {
		dev_err(g_ipu_dev, "request SYNC interrupt failed\n");

	/* Some platforms have 2 IPU interrupts */
	g_ipu_irq[1] = platform_get_irq(pdev, 1);
	if (g_ipu_irq[1] >= 0) {
		    (g_ipu_irq[1], ipu_irq_handler, 0, pdev->name, 0) != 0) {
			dev_err(g_ipu_dev, "request ERR interrupt failed\n");

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	/* Register bank offset differs for IPUv3M */
	ipu_base = res->start;
	if (g_ipu_hw_rev == 3)	/* IPUv3M */
		ipu_base += IPUV3M_REG_BASE;
	else	/* IPUv3D, v3E, v3EX */
		ipu_base += IPU_REG_BASE;

	/* Map each IPU sub-module register bank */
	ipu_cm_reg = ioremap(ipu_base + IPU_CM_REG_BASE, PAGE_SIZE);
	ipu_ic_reg = ioremap(ipu_base + IPU_IC_REG_BASE, PAGE_SIZE);
	ipu_idmac_reg = ioremap(ipu_base + IPU_IDMAC_REG_BASE, PAGE_SIZE);
	/* DP Registers are accessed thru the SRM */
	ipu_dp_reg = ioremap(ipu_base + IPU_SRM_REG_BASE, PAGE_SIZE);
	ipu_dc_reg = ioremap(ipu_base + IPU_DC_REG_BASE, PAGE_SIZE);
	ipu_dmfc_reg = ioremap(ipu_base + IPU_DMFC_REG_BASE, PAGE_SIZE);
	ipu_di_reg[0] = ioremap(ipu_base + IPU_DI0_REG_BASE, PAGE_SIZE);
	ipu_di_reg[1] = ioremap(ipu_base + IPU_DI1_REG_BASE, PAGE_SIZE);
	ipu_smfc_reg = ioremap(ipu_base + IPU_SMFC_REG_BASE, PAGE_SIZE);
	ipu_csi_reg[0] = ioremap(ipu_base + IPU_CSI0_REG_BASE, PAGE_SIZE);
	ipu_csi_reg[1] = ioremap(ipu_base + IPU_CSI1_REG_BASE, PAGE_SIZE);
	ipu_cpmem_base = ioremap(ipu_base + IPU_CPMEM_REG_BASE, SZ_128K);
	ipu_tpmem_base = ioremap(ipu_base + IPU_TPM_REG_BASE, SZ_64K);
	ipu_dc_tmpl_reg = ioremap(ipu_base + IPU_DC_TMPL_REG_BASE, SZ_128K);
	ipu_disp_base[1] = ioremap(ipu_base + IPU_DISP1_BASE, SZ_4K);
	ipu_vdi_reg = ioremap(ipu_base + IPU_VDI_REG_BASE, PAGE_SIZE);

	dev_dbg(g_ipu_dev, "IPU VDI Regs = %p\n", ipu_vdi_reg);
	dev_dbg(g_ipu_dev, "IPU CM Regs = %p\n", ipu_cm_reg);
	dev_dbg(g_ipu_dev, "IPU IC Regs = %p\n", ipu_ic_reg);
	dev_dbg(g_ipu_dev, "IPU IDMAC Regs = %p\n", ipu_idmac_reg);
	dev_dbg(g_ipu_dev, "IPU DP Regs = %p\n", ipu_dp_reg);
	dev_dbg(g_ipu_dev, "IPU DC Regs = %p\n", ipu_dc_reg);
	dev_dbg(g_ipu_dev, "IPU DMFC Regs = %p\n", ipu_dmfc_reg);
	dev_dbg(g_ipu_dev, "IPU DI0 Regs = %p\n", ipu_di_reg[0]);
	dev_dbg(g_ipu_dev, "IPU DI1 Regs = %p\n", ipu_di_reg[1]);
	dev_dbg(g_ipu_dev, "IPU SMFC Regs = %p\n", ipu_smfc_reg);
	dev_dbg(g_ipu_dev, "IPU CSI0 Regs = %p\n", ipu_csi_reg[0]);
	dev_dbg(g_ipu_dev, "IPU CSI1 Regs = %p\n", ipu_csi_reg[1]);
	dev_dbg(g_ipu_dev, "IPU CPMem = %p\n", ipu_cpmem_base);
	dev_dbg(g_ipu_dev, "IPU TPMem = %p\n", ipu_tpmem_base);
	dev_dbg(g_ipu_dev, "IPU DC Template Mem = %p\n", ipu_dc_tmpl_reg);
	dev_dbg(g_ipu_dev, "IPU Display Region 1 Mem = %p\n", ipu_disp_base[1]);

	/* Expose the two pixel clocks through clkdev */
	clkdev_add(&ipu_lookups[0]);
	clkdev_add(&ipu_lookups[1]);
	clk_debug_register(&pixel_clk[0]);
	clk_debug_register(&pixel_clk[1]);

	g_pixel_clk[0] = &pixel_clk[0];
	g_pixel_clk[1] = &pixel_clk[1];

	/* Enable IPU and CSI clocks */
	/* Get IPU clock freq */
	g_ipu_clk = clk_get(&pdev->dev, "ipu_clk");
	dev_dbg(g_ipu_dev, "ipu_clk = %lu\n", clk_get_rate(g_ipu_clk));

	clk_set_parent(g_pixel_clk[0], g_ipu_clk);
	clk_set_parent(g_pixel_clk[1], g_ipu_clk);
	clk_enable(g_ipu_clk);

	g_di_clk[0] = clk_get(&pdev->dev, "ipu_di0_clk");
	g_di_clk[1] = clk_get(&pdev->dev, "ipu_di1_clk");

	g_csi_clk[0] = plat_data->csi_clk[0];
	g_csi_clk[1] = plat_data->csi_clk[1];

	/* Reset IPU internal memories and wait for completion */
	__raw_writel(0x807FFFFF, IPU_MEM_RST);
	while (__raw_readl(IPU_MEM_RST) & 0x80000000)

	_ipu_init_dc_mappings();

	/* Enable error interrupts by default */
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(5));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(6));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(9));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(10));

	_ipu_dmfc_init(DMFC_NORMAL, 1);

	/* Set sync refresh channels and CSI->mem channel as high priority */
	__raw_writel(0x18800001L, IDMAC_CHA_PRI(0));

	/* Set MCU_T to divide MCU access window into 2 */
	__raw_writel(0x00400000L | (IPU_MCU_T_DEFAULT << 18), IPU_DISP_GEN);

	clk_disable(g_ipu_clk);

	register_fb_device(pdev);

	register_ipu_device();
/*
 * Remove the IPU: release the interrupts and unmap the register banks.
 *
 * NOTE(review): some lines (including iounmap calls for ipu_cm_reg,
 * ipu_ic_reg and ipu_dp_reg, and the return) are missing from this
 * extract; the visible statements are preserved unchanged.
 */
int ipu_remove(struct platform_device *pdev)
	free_irq(g_ipu_irq[0], 0);
	free_irq(g_ipu_irq[1], 0);
	iounmap(ipu_idmac_reg);
	iounmap(ipu_dmfc_reg);
	iounmap(ipu_di_reg[0]);
	iounmap(ipu_di_reg[1]);
	iounmap(ipu_smfc_reg);
	iounmap(ipu_csi_reg[0]);
	iounmap(ipu_csi_reg[1]);
	iounmap(ipu_cpmem_base);
	iounmap(ipu_tpmem_base);
	iounmap(ipu_dc_tmpl_reg);
	iounmap(ipu_disp_base[1]);
	iounmap(ipu_vdi_reg);
522 void ipu_dump_registers(void)
524 printk(KERN_DEBUG "IPU_CONF = \t0x%08X\n", __raw_readl(IPU_CONF));
525 printk(KERN_DEBUG "IDMAC_CONF = \t0x%08X\n", __raw_readl(IDMAC_CONF));
526 printk(KERN_DEBUG "IDMAC_CHA_EN1 = \t0x%08X\n",
527 __raw_readl(IDMAC_CHA_EN(0)));
528 printk(KERN_DEBUG "IDMAC_CHA_EN2 = \t0x%08X\n",
529 __raw_readl(IDMAC_CHA_EN(32)));
530 printk(KERN_DEBUG "IDMAC_CHA_PRI1 = \t0x%08X\n",
531 __raw_readl(IDMAC_CHA_PRI(0)));
532 printk(KERN_DEBUG "IDMAC_CHA_PRI2 = \t0x%08X\n",
533 __raw_readl(IDMAC_CHA_PRI(32)));
534 printk(KERN_DEBUG "IDMAC_BAND_EN1 = \t0x%08X\n",
535 __raw_readl(IDMAC_BAND_EN(0)));
536 printk(KERN_DEBUG "IDMAC_BAND_EN2 = \t0x%08X\n",
537 __raw_readl(IDMAC_BAND_EN(32)));
538 printk(KERN_DEBUG "IPU_CHA_DB_MODE_SEL0 = \t0x%08X\n",
539 __raw_readl(IPU_CHA_DB_MODE_SEL(0)));
540 printk(KERN_DEBUG "IPU_CHA_DB_MODE_SEL1 = \t0x%08X\n",
541 __raw_readl(IPU_CHA_DB_MODE_SEL(32)));
542 if (g_ipu_hw_rev >= 2) {
543 printk(KERN_DEBUG "IPU_CHA_TRB_MODE_SEL0 = \t0x%08X\n",
544 __raw_readl(IPU_CHA_TRB_MODE_SEL(0)));
545 printk(KERN_DEBUG "IPU_CHA_TRB_MODE_SEL1 = \t0x%08X\n",
546 __raw_readl(IPU_CHA_TRB_MODE_SEL(32)));
548 printk(KERN_DEBUG "DMFC_WR_CHAN = \t0x%08X\n",
549 __raw_readl(DMFC_WR_CHAN));
550 printk(KERN_DEBUG "DMFC_WR_CHAN_DEF = \t0x%08X\n",
551 __raw_readl(DMFC_WR_CHAN_DEF));
552 printk(KERN_DEBUG "DMFC_DP_CHAN = \t0x%08X\n",
553 __raw_readl(DMFC_DP_CHAN));
554 printk(KERN_DEBUG "DMFC_DP_CHAN_DEF = \t0x%08X\n",
555 __raw_readl(DMFC_DP_CHAN_DEF));
556 printk(KERN_DEBUG "DMFC_IC_CTRL = \t0x%08X\n",
557 __raw_readl(DMFC_IC_CTRL));
558 printk(KERN_DEBUG "IPU_FS_PROC_FLOW1 = \t0x%08X\n",
559 __raw_readl(IPU_FS_PROC_FLOW1));
560 printk(KERN_DEBUG "IPU_FS_PROC_FLOW2 = \t0x%08X\n",
561 __raw_readl(IPU_FS_PROC_FLOW2));
562 printk(KERN_DEBUG "IPU_FS_PROC_FLOW3 = \t0x%08X\n",
563 __raw_readl(IPU_FS_PROC_FLOW3));
564 printk(KERN_DEBUG "IPU_FS_DISP_FLOW1 = \t0x%08X\n",
565 __raw_readl(IPU_FS_DISP_FLOW1));
/*!
 * This function is called to initialize a logical IPU channel.
 *
 * @param	channel	Input parameter for the logical channel ID to init.
 *
 * @param	params	Input parameter containing union of channel
 *			initialization parameters.
 *
 * @return	Returns 0 on success or negative error code on fail
 */
/*
 * Initialize a logical IPU channel: program the sub-modules the channel
 * uses (CSI, SMFC, IC, VDI, DC, DP, DMFC), bump their use counts, and
 * record the channel in g_channel_init_mask, all under ipu_lock.
 *
 * NOTE(review): this extract is missing many lines (most case labels,
 * break/return statements, use-count increments and closing braces of
 * the per-channel switch); the visible statements are preserved
 * unchanged and their grouping below is approximate.
 */
int32_t ipu_init_channel(ipu_channel_t channel, ipu_channel_params_t *params)
	unsigned long lock_flags;

	dev_dbg(g_ipu_dev, "init channel = %d\n", IPU_CHAN_ID(channel));

	/* re-enable error interrupts every time a channel is initialized */
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(5));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(6));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(9));
	__raw_writel(0xFFFFFFFF, IPU_INT_CTRL(10));

	/* first channel initialized powers up the IPU clock */
	if (g_ipu_clk_enabled == false) {
		g_ipu_clk_enabled = true;
		clk_enable(g_ipu_clk);

	spin_lock_irqsave(&ipu_lock, lock_flags);

	if (g_channel_init_mask & (1L << IPU_CHAN_ID(channel))) {
		dev_err(g_ipu_dev, "Warning: channel already initialized %d\n",
			IPU_CHAN_ID(channel));

	ipu_conf = __raw_readl(IPU_CONF);

	/* CSI -> memory through SMFC */
	if (params->csi_mem.csi > 1) {

	if (params->csi_mem.interlaced)
		g_chan_is_interlaced[channel_2_dma(channel,
			IPU_OUTPUT_BUFFER)] = true;
		g_chan_is_interlaced[channel_2_dma(channel,
			IPU_OUTPUT_BUFFER)] = false;

	ipu_smfc_use_count++;
	g_ipu_csi_channel[params->csi_mem.csi] = channel;

	if (params->csi_mem.mipi_en) {
		ipu_conf |= (1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
			params->csi_mem.csi));
		_ipu_smfc_init(channel, params->csi_mem.mipi_id,
			params->csi_mem.csi);
		ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
			params->csi_mem.csi));
		_ipu_smfc_init(channel, 0, params->csi_mem.csi);

	/*CSI data (include compander) dest*/
	_ipu_csi_init(channel, params->csi_mem.csi);

	case CSI_PRP_ENC_MEM:
		if (params->csi_prp_enc_mem.csi > 1) {
		/* IC direct channel is exclusive with VDI use */
		if (using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) {
		using_ic_dirct_ch = CSI_PRP_ENC_MEM;
		g_ipu_csi_channel[params->csi_prp_enc_mem.csi] = channel;
		/*Without SMFC, CSI only support parallel data source*/
		ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
			params->csi_prp_enc_mem.csi));
		/*CSI0/1 feed into IC*/
		ipu_conf &= ~IPU_CONF_IC_INPUT;
		if (params->csi_prp_enc_mem.csi)
			ipu_conf |= IPU_CONF_CSI_SEL;
			ipu_conf &= ~IPU_CONF_CSI_SEL;
		/*PRP skip buffer in memory, only valid when RWS_EN is true*/
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
		/*CSI data (include compander) dest*/
		_ipu_csi_init(channel, params->csi_prp_enc_mem.csi);
		_ipu_ic_init_prpenc(params, true);

		if (params->csi_prp_vf_mem.csi > 1) {
		if (using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) {
		using_ic_dirct_ch = CSI_PRP_VF_MEM;
		g_ipu_csi_channel[params->csi_prp_vf_mem.csi] = channel;
		/*Without SMFC, CSI only support parallel data source*/
		ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
			params->csi_prp_vf_mem.csi));
		/*CSI0/1 feed into IC*/
		ipu_conf &= ~IPU_CONF_IC_INPUT;
		if (params->csi_prp_vf_mem.csi)
			ipu_conf |= IPU_CONF_CSI_SEL;
			ipu_conf &= ~IPU_CONF_CSI_SEL;
		/*PRP skip buffer in memory, only valid when RWS_EN is true*/
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
		/*CSI data (include compander) dest*/
		_ipu_csi_init(channel, params->csi_prp_vf_mem.csi);
		_ipu_ic_init_prpvf(params, true);

		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg | FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
		if (params->mem_prp_vf_mem.graphics_combine_en)
			g_sec_chan_en[IPU_CHAN_ID(channel)] = true;
		if (params->mem_prp_vf_mem.alpha_chan_en)
			g_thrd_chan_en[IPU_CHAN_ID(channel)] = true;
		_ipu_ic_init_prpvf(params, false);

	case MEM_VDI_PRP_VF_MEM:
		if ((using_ic_dirct_ch == CSI_PRP_VF_MEM) ||
		    (using_ic_dirct_ch == CSI_PRP_ENC_MEM)) {
		using_ic_dirct_ch = MEM_VDI_PRP_VF_MEM;
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		reg &= ~FS_VDI_SRC_SEL_MASK;
		__raw_writel(reg , IPU_FS_PROC_FLOW1);
		if (params->mem_prp_vf_mem.graphics_combine_en)
			g_sec_chan_en[IPU_CHAN_ID(channel)] = true;
		_ipu_ic_init_prpvf(params, false);
		_ipu_vdi_init(channel, params);
	case MEM_VDI_PRP_VF_MEM_P:
		_ipu_vdi_init(channel, params);
	case MEM_VDI_PRP_VF_MEM_N:
		_ipu_vdi_init(channel, params);

		_ipu_ic_init_rotate_vf(params);
	case MEM_PRP_ENC_MEM:
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg | FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
		_ipu_ic_init_prpenc(params, false);
	case MEM_ROT_ENC_MEM:
		_ipu_ic_init_rotate_enc(params);

		if (params->mem_pp_mem.graphics_combine_en)
			g_sec_chan_en[IPU_CHAN_ID(channel)] = true;
		if (params->mem_pp_mem.alpha_chan_en)
			g_thrd_chan_en[IPU_CHAN_ID(channel)] = true;
		_ipu_ic_init_pp(params);
		_ipu_ic_init_rotate_pp(params);

		if (params->mem_dc_sync.di > 1) {
		g_dc_di_assignment[1] = params->mem_dc_sync.di;
		_ipu_dc_init(1, params->mem_dc_sync.di,
			params->mem_dc_sync.interlaced,
			params->mem_dc_sync.out_pixel_fmt);
		ipu_di_use_count[params->mem_dc_sync.di]++;
		ipu_dmfc_use_count++;

		if (params->mem_dp_bg_sync.di > 1) {
		if (params->mem_dp_bg_sync.alpha_chan_en)
			g_thrd_chan_en[IPU_CHAN_ID(channel)] = true;
		g_dc_di_assignment[5] = params->mem_dp_bg_sync.di;
		_ipu_dp_init(channel, params->mem_dp_bg_sync.in_pixel_fmt,
			params->mem_dp_bg_sync.out_pixel_fmt);
		_ipu_dc_init(5, params->mem_dp_bg_sync.di,
			params->mem_dp_bg_sync.interlaced,
			params->mem_dp_bg_sync.out_pixel_fmt);
		ipu_di_use_count[params->mem_dp_bg_sync.di]++;
		ipu_dmfc_use_count++;

		_ipu_dp_init(channel, params->mem_dp_fg_sync.in_pixel_fmt,
			params->mem_dp_fg_sync.out_pixel_fmt);
		if (params->mem_dp_fg_sync.alpha_chan_en)
			g_thrd_chan_en[IPU_CHAN_ID(channel)] = true;
		ipu_dmfc_use_count++;

		if (params->direct_async.di > 1) {
		g_dc_di_assignment[8] = params->direct_async.di;
		_ipu_dc_init(8, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
		ipu_di_use_count[params->direct_async.di]++;

		if (params->direct_async.di > 1) {
		g_dc_di_assignment[9] = params->direct_async.di;
		_ipu_dc_init(9, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
		ipu_di_use_count[params->direct_async.di]++;

		dev_err(g_ipu_dev, "Missing channel initialization\n");

	/* Enable IPU sub module */
	g_channel_init_mask |= 1L << IPU_CHAN_ID(channel);

	__raw_writel(ipu_conf, IPU_CONF);

	spin_unlock_irqrestore(&ipu_lock, lock_flags);
EXPORT_SYMBOL(ipu_init_channel);
/*!
 * This function is called to uninitialize a logical IPU channel.
 *
 * @param	channel	Input parameter for the logical channel ID to uninit.
 */
/*
 * Uninitialize a logical IPU channel: refuse if its DMA channels are
 * still enabled, clear double/triple-buffer selects, tear down the
 * sub-modules it used, drop their use counts and gate off any
 * sub-module whose count reached zero; power the IPU clock down when
 * IPU_CONF reaches zero.
 *
 * NOTE(review): this extract is missing many lines (case labels,
 * breaks, use-count decrements, closing braces); the visible
 * statements are preserved unchanged and grouping is approximate.
 */
void ipu_uninit_channel(ipu_channel_t channel)
	unsigned long lock_flags;
	uint32_t in_dma, out_dma = 0;

	if ((g_channel_init_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
		dev_err(g_ipu_dev, "Channel already uninitialized %d\n",
			IPU_CHAN_ID(channel));

	/* Make sure channel is disabled */
	/* Get input and output dma channels */
	in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
	out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);

	if (idma_is_set(IDMAC_CHA_EN, in_dma) ||
	    idma_is_set(IDMAC_CHA_EN, out_dma)) {
		"Channel %d is not disabled, disable first\n",
		IPU_CHAN_ID(channel));

	spin_lock_irqsave(&ipu_lock, lock_flags);

	ipu_conf = __raw_readl(IPU_CONF);

	/* Reset the double buffer */
	reg = __raw_readl(IPU_CHA_DB_MODE_SEL(in_dma));
	__raw_writel(reg & ~idma_mask(in_dma), IPU_CHA_DB_MODE_SEL(in_dma));
	reg = __raw_readl(IPU_CHA_DB_MODE_SEL(out_dma));
	__raw_writel(reg & ~idma_mask(out_dma), IPU_CHA_DB_MODE_SEL(out_dma));

	/* Reset the triple buffer */
	reg = __raw_readl(IPU_CHA_TRB_MODE_SEL(in_dma));
	__raw_writel(reg & ~idma_mask(in_dma), IPU_CHA_TRB_MODE_SEL(in_dma));
	reg = __raw_readl(IPU_CHA_TRB_MODE_SEL(out_dma));
	__raw_writel(reg & ~idma_mask(out_dma), IPU_CHA_TRB_MODE_SEL(out_dma));

	if (_ipu_is_ic_chan(in_dma) || _ipu_is_dp_graphic_chan(in_dma)) {
		g_sec_chan_en[IPU_CHAN_ID(channel)] = false;
		g_thrd_chan_en[IPU_CHAN_ID(channel)] = false;

		ipu_smfc_use_count--;
		if (g_ipu_csi_channel[0] == channel) {
			g_ipu_csi_channel[0] = CHAN_NONE;
		} else if (g_ipu_csi_channel[1] == channel) {
			g_ipu_csi_channel[1] = CHAN_NONE;
	case CSI_PRP_ENC_MEM:
		if (using_ic_dirct_ch == CSI_PRP_ENC_MEM)
			using_ic_dirct_ch = 0;
		_ipu_ic_uninit_prpenc();
		if (g_ipu_csi_channel[0] == channel) {
			g_ipu_csi_channel[0] = CHAN_NONE;
		} else if (g_ipu_csi_channel[1] == channel) {
			g_ipu_csi_channel[1] = CHAN_NONE;

		if (using_ic_dirct_ch == CSI_PRP_VF_MEM)
			using_ic_dirct_ch = 0;
		_ipu_ic_uninit_prpvf();
		if (g_ipu_csi_channel[0] == channel) {
			g_ipu_csi_channel[0] = CHAN_NONE;
		} else if (g_ipu_csi_channel[1] == channel) {
			g_ipu_csi_channel[1] = CHAN_NONE;

		_ipu_ic_uninit_prpvf();
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
	case MEM_VDI_PRP_VF_MEM:
		if (using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM)
			using_ic_dirct_ch = 0;
		_ipu_ic_uninit_prpvf();
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
	case MEM_VDI_PRP_VF_MEM_P:
	case MEM_VDI_PRP_VF_MEM_N:

		_ipu_ic_uninit_rotate_vf();
	case MEM_PRP_ENC_MEM:
		_ipu_ic_uninit_prpenc();
		reg = __raw_readl(IPU_FS_PROC_FLOW1);
		__raw_writel(reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
	case MEM_ROT_ENC_MEM:
		_ipu_ic_uninit_rotate_enc();
		_ipu_ic_uninit_rotate_pp();

		ipu_di_use_count[g_dc_di_assignment[1]]--;
		ipu_dmfc_use_count--;
		_ipu_dp_uninit(channel);
		ipu_di_use_count[g_dc_di_assignment[5]]--;
		ipu_dmfc_use_count--;
		_ipu_dp_uninit(channel);
		ipu_dmfc_use_count--;
		ipu_di_use_count[g_dc_di_assignment[8]]--;
		ipu_di_use_count[g_dc_di_assignment[9]]--;

	g_channel_init_mask &= ~(1L << IPU_CHAN_ID(channel));

	/* Gate off sub-modules whose last user went away */
	if (ipu_ic_use_count == 0)
		ipu_conf &= ~IPU_CONF_IC_EN;
	if (ipu_vdi_use_count == 0) {
		ipu_conf &= ~IPU_CONF_ISP_EN;
		ipu_conf &= ~IPU_CONF_VDI_EN;
		ipu_conf &= ~IPU_CONF_IC_INPUT;
	if (ipu_rot_use_count == 0)
		ipu_conf &= ~IPU_CONF_ROT_EN;
	if (ipu_dc_use_count == 0)
		ipu_conf &= ~IPU_CONF_DC_EN;
	if (ipu_dp_use_count == 0)
		ipu_conf &= ~IPU_CONF_DP_EN;
	if (ipu_dmfc_use_count == 0)
		ipu_conf &= ~IPU_CONF_DMFC_EN;
	if (ipu_di_use_count[0] == 0) {
		ipu_conf &= ~IPU_CONF_DI0_EN;
	if (ipu_di_use_count[1] == 0) {
		ipu_conf &= ~IPU_CONF_DI1_EN;
	if (ipu_smfc_use_count == 0)
		ipu_conf &= ~IPU_CONF_SMFC_EN;

	__raw_writel(ipu_conf, IPU_CONF);

	spin_unlock_irqrestore(&ipu_lock, lock_flags);

	/* All sub-modules off: power down the IPU clock */
	if (ipu_conf == 0) {
		clk_disable(g_ipu_clk);
		g_ipu_clk_enabled = false;

	WARN_ON(ipu_ic_use_count < 0);
	WARN_ON(ipu_vdi_use_count < 0);
	WARN_ON(ipu_rot_use_count < 0);
	WARN_ON(ipu_dc_use_count < 0);
	WARN_ON(ipu_dp_use_count < 0);
	WARN_ON(ipu_dmfc_use_count < 0);
	WARN_ON(ipu_smfc_use_count < 0);
EXPORT_SYMBOL(ipu_uninit_channel);
/*!
 * This function is called to initialize buffer(s) for logical IPU channel.
 *
 * @param	channel		Input parameter for the logical channel ID.
 *
 * @param	type		Input parameter which buffer to initialize.
 *
 * @param	pixel_fmt	Input parameter for pixel format of buffer.
 *				Pixel format is a FOURCC ASCII code.
 *
 * @param	width		Input parameter for width of buffer in pixels.
 *
 * @param	height		Input parameter for height of buffer in pixels.
 *
 * @param	stride		Input parameter for stride length of buffer
 *				in pixels.
 *
 * @param	rot_mode	Input parameter for rotation setting of buffer.
 *				A rotation setting other than
 *				IPU_ROTATE_VERT_FLIP
 *				should only be used for input buffers of
 *				rotation channels.
 *
 * @param	phyaddr_0	Input parameter buffer 0 physical address.
 *
 * @param	phyaddr_1	Input parameter buffer 1 physical address.
 *				Setting this to a value other than NULL enables
 *				double buffering mode.
 *
 * @param	phyaddr_2	Input parameter buffer 2 physical address.
 *				Setting this to a value other than NULL enables
 *				triple buffering mode, phyaddr_1 should not be
 *				NULL then.
 *
 * @param	u		private u offset for additional cropping,
 *				zero if not used.
 *
 * @param	v		private v offset for additional cropping,
 *				zero if not used.
 *
 * @return	Returns 0 on success or negative error code on fail
 */
/*
 * Program the IDMAC channel parameter memory (CPMEM) for one buffer
 * set: validates width/stride/triple-buffer constraints, writes the
 * channel parameters, configures alpha/burst/interlace options and
 * finally selects double- or triple-buffer mode under ipu_lock.
 *
 * NOTE(review): this extract is missing lines (the pixel_fmt/stride
 * parameters in the signature, several error returns, else branches
 * and closing braces); visible statements are preserved unchanged.
 */
int32_t ipu_init_channel_buffer(ipu_channel_t channel, ipu_buffer_t type,
				uint16_t width, uint16_t height,
				ipu_rotate_mode_t rot_mode,
				dma_addr_t phyaddr_0, dma_addr_t phyaddr_1,
				dma_addr_t phyaddr_2,
				uint32_t u, uint32_t v)
	unsigned long lock_flags;
	uint32_t burst_size;

	dma_chan = channel_2_dma(channel, type);
	if (!idma_is_valid(dma_chan))

	/* Clamp stride up to at least one full line of pixels */
	if (stride < width * bytes_per_pixel(pixel_fmt))
		stride = width * bytes_per_pixel(pixel_fmt);

		"Stride not 32-bit aligned, stride = %d\n", stride);

	/* IC & IRT channels' width must be multiple of 8 pixels */
	if ((_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan))
		dev_err(g_ipu_dev, "Width must be 8 pixel multiple\n");

	/* IPUv3EX and IPUv3M support triple buffer */
	if ((!_ipu_is_trb_chan(dma_chan)) && phyaddr_2) {
		dev_err(g_ipu_dev, "Chan%d doesn't support triple buffer "
				   "mode\n", dma_chan);
	if (!phyaddr_1 && phyaddr_2) {
		dev_err(g_ipu_dev, "Chan%d's buf1 physical addr is NULL for "
				   "triple buffer mode\n", dma_chan);

	/* Build parameter memory data for DMA channel */
	_ipu_ch_param_init(dma_chan, pixel_fmt, width, height, stride, u, v, 0,
			   phyaddr_0, phyaddr_1, phyaddr_2);

	/* Set correlative channel parameter of local alpha channel */
	if ((_ipu_is_ic_graphic_chan(dma_chan) ||
	     _ipu_is_dp_graphic_chan(dma_chan)) &&
	    (g_thrd_chan_en[IPU_CHAN_ID(channel)] == true)) {
		_ipu_ch_param_set_alpha_use_separate_channel(dma_chan, true);
		_ipu_ch_param_set_alpha_buffer_memory(dma_chan);
		_ipu_ch_param_set_alpha_condition_read(dma_chan);
		/* fix alpha width as 8 and burst size as 16*/
		_ipu_ch_params_set_alpha_width(dma_chan, 8);
		_ipu_ch_param_set_burst_size(dma_chan, 16);
	} else if (_ipu_is_ic_graphic_chan(dma_chan) &&
		   ipu_pixel_format_has_alpha(pixel_fmt))
		_ipu_ch_param_set_alpha_use_separate_channel(dma_chan, false);

	_ipu_ch_param_set_rotation(dma_chan, rot_mode);

	/* IC and ROT channels have restriction of 8 or 16 pix burst length */
	if (_ipu_is_ic_chan(dma_chan)) {
		if ((width % 16) == 0)
			_ipu_ch_param_set_burst_size(dma_chan, 16);
			_ipu_ch_param_set_burst_size(dma_chan, 8);
	} else if (_ipu_is_irt_chan(dma_chan)) {
		_ipu_ch_param_set_burst_size(dma_chan, 8);
		_ipu_ch_param_set_block_mode(dma_chan);
	} else if (_ipu_is_dmfc_chan(dma_chan)) {
		burst_size = _ipu_ch_param_get_burst_size(dma_chan);
		spin_lock_irqsave(&ipu_lock, lock_flags);
		_ipu_dmfc_set_wait4eot(dma_chan, width);
		_ipu_dmfc_set_burst_size(dma_chan, burst_size);
		spin_unlock_irqrestore(&ipu_lock, lock_flags);

	if (_ipu_disp_chan_is_interlaced(channel) ||
	    g_chan_is_interlaced[dma_chan])
		_ipu_ch_param_set_interlaced_scan(dma_chan);

	if (_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan)) {
		burst_size = _ipu_ch_param_get_burst_size(dma_chan);
		_ipu_ic_idma_init(dma_chan, width, height, burst_size,
	} else if (_ipu_is_smfc_chan(dma_chan)) {
		burst_size = _ipu_ch_param_get_burst_size(dma_chan);
		/* SMFC burst size is expressed in bus words, not pixels */
		if ((pixel_fmt == IPU_PIX_FMT_GENERIC) &&
		    ((_ipu_ch_param_get_bpp(dma_chan) == 5) ||
		     (_ipu_ch_param_get_bpp(dma_chan) == 3)))
			burst_size = burst_size >> 4;
			burst_size = burst_size >> 2;
		_ipu_smfc_set_burst_size(channel, burst_size-1);

	if (idma_is_set(IDMAC_CHA_PRI, dma_chan) && !cpu_is_mx53())
		_ipu_ch_param_set_high_priority(dma_chan);

	_ipu_ch_param_dump(dma_chan);

	spin_lock_irqsave(&ipu_lock, lock_flags);
	/* With a third buffer on rev >= 2 hardware use triple-buffer mode */
	if (phyaddr_2 && g_ipu_hw_rev >= 2) {
		reg = __raw_readl(IPU_CHA_DB_MODE_SEL(dma_chan));
		reg &= ~idma_mask(dma_chan);
		__raw_writel(reg, IPU_CHA_DB_MODE_SEL(dma_chan));

		reg = __raw_readl(IPU_CHA_TRB_MODE_SEL(dma_chan));
		reg |= idma_mask(dma_chan);
		__raw_writel(reg, IPU_CHA_TRB_MODE_SEL(dma_chan));

		/* Set IDMAC third buffer's cpmem number */
		/* See __ipu_ch_get_third_buf_cpmem_num() for mapping */
		__raw_writel(0x00444047L, IDMAC_SUB_ADDR_4);
		__raw_writel(0x46004241L, IDMAC_SUB_ADDR_3);
		__raw_writel(0x00000045L, IDMAC_SUB_ADDR_1);

		/* Reset to buffer 0 */
		__raw_writel(tri_cur_buf_mask(dma_chan),
			     IPU_CHA_TRIPLE_CUR_BUF(dma_chan));

		reg = __raw_readl(IPU_CHA_TRB_MODE_SEL(dma_chan));
		reg &= ~idma_mask(dma_chan);
		__raw_writel(reg, IPU_CHA_TRB_MODE_SEL(dma_chan));

		reg = __raw_readl(IPU_CHA_DB_MODE_SEL(dma_chan));
			reg |= idma_mask(dma_chan);
			reg &= ~idma_mask(dma_chan);
		__raw_writel(reg, IPU_CHA_DB_MODE_SEL(dma_chan));

		/* Reset to buffer 0 */
		__raw_writel(idma_mask(dma_chan),
			     IPU_CHA_CUR_BUF(dma_chan));

	spin_unlock_irqrestore(&ipu_lock, lock_flags);
EXPORT_SYMBOL(ipu_init_channel_buffer);
1257 * This function is called to update the physical address of a buffer for
1258 * a logical IPU channel.
1260 * @param channel Input parameter for the logical channel ID.
1262 * @param type Input parameter which buffer to initialize.
1264 * @param bufNum Input parameter for buffer number to update.
1265 * 0 or 1 are the only valid values.
1267 * @param phyaddr Input parameter buffer physical address.
1269 * @return This function returns 0 on success or negative error code on
1270 * fail. This function will fail if the buffer is set to ready.
1272 int32_t ipu_update_channel_buffer(ipu_channel_t channel, ipu_buffer_t type,
1273 uint32_t bufNum, dma_addr_t phyaddr)
1277 unsigned long lock_flags;
1278 uint32_t dma_chan = channel_2_dma(channel, type);
/* Channel/type combination with no IDMAC channel mapping is rejected. */
1279 if (dma_chan == IDMA_CHAN_INVALID)
1282 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Read the ready flag of the requested buffer (0, 1, or the third buffer). */
1285 reg = __raw_readl(IPU_CHA_BUF0_RDY(dma_chan));
1286 else if (bufNum == 1)
1287 reg = __raw_readl(IPU_CHA_BUF1_RDY(dma_chan));
1289 reg = __raw_readl(IPU_CHA_BUF2_RDY(dma_chan));
/*
 * Only write the new physical address into the channel parameter
 * memory while the buffer is NOT marked ready; updating a ready
 * buffer would race with the IDMAC (see function header: fails if
 * the buffer is set to ready).
 */
1291 if ((reg & idma_mask(dma_chan)) == 0)
1292 _ipu_ch_param_set_buffer(dma_chan, bufNum, phyaddr);
1296 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1299 EXPORT_SYMBOL(ipu_update_channel_buffer);
1303 * This function is called to initialize a buffer for logical IPU channel.
1305 * @param channel Input parameter for the logical channel ID.
1307 * @param type Input parameter which buffer to initialize.
1309 * @param pixel_fmt Input parameter for pixel format of buffer.
1310 * Pixel format is a FOURCC ASCII code.
1312 * @param width Input parameter for width of buffer in pixels.
1314 * @param height Input parameter for height of buffer in pixels.
1316 * @param stride Input parameter for stride length of buffer
1319 * @param u predefined private u offset for additional cropping,
1322 * @param v predefined private v offset for additional cropping,
1325 * @param vertical_offset vertical offset for Y coordinate
1326 * in the existed frame
1329 * @param horizontal_offset horizontal offset for X coordinate
1330 * in the existed frame
1333 * @return Returns 0 on success or negative error code on fail
1334 * This function will fail if any buffer is set to ready.
1337 int32_t ipu_update_channel_offset(ipu_channel_t channel, ipu_buffer_t type,
1339 uint16_t width, uint16_t height,
1341 uint32_t u, uint32_t v,
1342 uint32_t vertical_offset, uint32_t horizontal_offset)
1345 unsigned long lock_flags;
1346 uint32_t dma_chan = channel_2_dma(channel, type);
/* No IDMAC channel backs this logical channel/buffer type: bail out. */
1348 if (dma_chan == IDMA_CHAN_INVALID)
1351 spin_lock_irqsave(&ipu_lock, lock_flags);
/*
 * Refuse the offset update while any buffer is marked ready:
 * buffer 0, buffer 1, or buffer 2 -- the latter only counts when
 * the channel is actually running in triple-buffer mode (TRB mode
 * selected AND the channel is TRB-capable).
 */
1353 if ((__raw_readl(IPU_CHA_BUF0_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1354 (__raw_readl(IPU_CHA_BUF1_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1355 ((__raw_readl(IPU_CHA_BUF2_RDY(dma_chan)) & idma_mask(dma_chan)) &&
1356 (__raw_readl(IPU_CHA_TRB_MODE_SEL(dma_chan)) & idma_mask(dma_chan)) &&
1357 _ipu_is_trb_chan(dma_chan)))
/* Recompute the channel parameter memory offsets for the new crop. */
1360 _ipu_ch_offset_update(dma_chan, pixel_fmt, width, height, stride,
1361 u, v, 0, vertical_offset, horizontal_offset);
1363 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1366 EXPORT_SYMBOL(ipu_update_channel_offset);
1370 * This function is called to set a channel's buffer as ready.
1372 * @param channel Input parameter for the logical channel ID.
1374 * @param type Input parameter which buffer to initialize.
1376 * @param bufNum Input parameter for which buffer number set to
1379 * @return Returns 0 on success or negative error code on fail
1381 int32_t ipu_select_buffer(ipu_channel_t channel, ipu_buffer_t type,
1384 uint32_t dma_chan = channel_2_dma(channel, type);
1385 unsigned long lock_flags;
1387 if (dma_chan == IDMA_CHAN_INVALID)
1390 /* Mark buffer to be ready. */
1391 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Setting the channel's bit in the BUFn_RDY register hands the
 * buffer to the IDMAC; buffer 0, 1, or the third buffer is chosen
 * by bufNum. */
1393 __raw_writel(idma_mask(dma_chan),
1394 IPU_CHA_BUF0_RDY(dma_chan));
1395 else if (bufNum == 1)
1396 __raw_writel(idma_mask(dma_chan),
1397 IPU_CHA_BUF1_RDY(dma_chan));
1399 __raw_writel(idma_mask(dma_chan),
1400 IPU_CHA_BUF2_RDY(dma_chan));
1401 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1404 EXPORT_SYMBOL(ipu_select_buffer);
1407 * This function is called to set the input buffers of all three multi-field
 * VDI channels (previous/current/next field) as ready at once.
1409 * @param bufNum Input parameter for which buffer number set to
1412 * @return Returns 0 on success or negative error code on fail
1414 int32_t ipu_select_multi_vdi_buffer(uint32_t bufNum)
/*
 * The VDI de-interlacer consumes three input fields at once:
 * previous (MEM_VDI_PRP_VF_MEM_P), current (MEM_VDI_PRP_VF_MEM) and
 * next (MEM_VDI_PRP_VF_MEM_N).  Build one mask covering all three
 * IDMAC channels so their buffers are made ready atomically.
 */
1417 uint32_t dma_chan = channel_2_dma(MEM_VDI_PRP_VF_MEM, IPU_INPUT_BUFFER);
1419 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_P, IPU_INPUT_BUFFER))|
1420 idma_mask(dma_chan)|
1421 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_N, IPU_INPUT_BUFFER));
1422 unsigned long lock_flags;
1424 /* Mark buffers to be ready. */
1425 spin_lock_irqsave(&ipu_lock, lock_flags);
/* bufNum selects buffer 0 or buffer 1 for all three channels. */
1427 __raw_writel(mask_bit, IPU_CHA_BUF0_RDY(dma_chan));
1429 __raw_writel(mask_bit, IPU_CHA_BUF1_RDY(dma_chan));
1430 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1433 EXPORT_SYMBOL(ipu_select_multi_vdi_buffer);
/*
 * Frame Synchronization Unit (FSU) routing tables, indexed by
 * IPU_CHAN_ID() of a logical channel.  The values are the encodings
 * written into the *_DEST_SEL / *_SRC_SEL fields of the
 * IPU_FS_PROC_FLOWn / IPU_FS_DISP_FLOW1 registers by
 * ipu_link_channels().  NA marks channels that cannot appear in the
 * corresponding role.  Encodings come from the IPUv3 reference
 * manual -- NOTE(review): verify against the FSU chapter before
 * editing.
 */
1436 static int proc_dest_sel[] = {
1437 0, 1, 1, 3, 5, 5, 4, 7, 8, 9, 10, 11, 12, 14, 15, 16,
1438 0, 1, 1, 5, 5, 5, 5, 5, 7, 8, 9, 10, 11, 12, 14, 31 };
1439 static int proc_src_sel[] = { 0, 6, 7, 6, 7, 8, 5, NA, NA, NA,
1440 NA, NA, NA, NA, NA, 1, 2, 3, 4, 7, 8, NA, 8, NA };
1441 static int disp_src_sel[] = { 0, 6, 7, 8, 3, 4, 5, NA, NA, NA,
1442 NA, NA, NA, NA, NA, 1, NA, 2, NA, 3, 4, 4, 4, 4 };
1446 * This function links 2 channels together for automatic frame
1447 * synchronization. The output of the source channel is linked to the input of
1448 * the destination channel.
1450 * @param src_ch Input parameter for the logical channel ID of
1451 * the source channel.
1453 * @param dest_ch Input parameter for the logical channel ID of
1454 * the destination channel.
1456 * @return This function returns 0 on success or negative error code on
1459 int32_t ipu_link_channels(ipu_channel_t src_ch, ipu_channel_t dest_ch)
1462 unsigned long lock_flags;
1463 uint32_t fs_proc_flow1;
1464 uint32_t fs_proc_flow2;
1465 uint32_t fs_proc_flow3;
1466 uint32_t fs_disp_flow1;
1468 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Snapshot all four FSU flow registers; they are rewritten at the end. */
1470 fs_proc_flow1 = __raw_readl(IPU_FS_PROC_FLOW1);
1471 fs_proc_flow2 = __raw_readl(IPU_FS_PROC_FLOW2);
1472 fs_proc_flow3 = __raw_readl(IPU_FS_PROC_FLOW3);
1473 fs_disp_flow1 = __raw_readl(IPU_FS_DISP_FLOW1);
/*
 * First stage (switch on src_ch, elided here): program the
 * DEST_SEL field that belongs to the source channel so its output
 * is routed to dest_ch.  SMFC0..3 destinations live in PROC_FLOW3,
 * the IC/IRT task destinations in PROC_FLOW2.  proc_dest_sel[] maps
 * the destination channel ID to the hardware encoding.
 */
1477 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1479 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1480 FS_SMFC0_DEST_SEL_OFFSET;
1483 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1485 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1486 FS_SMFC1_DEST_SEL_OFFSET;
1489 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1491 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1492 FS_SMFC2_DEST_SEL_OFFSET;
1495 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1497 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1498 FS_SMFC3_DEST_SEL_OFFSET;
1500 case CSI_PRP_ENC_MEM:
1501 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1503 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1504 FS_PRPENC_DEST_SEL_OFFSET;
1506 case CSI_PRP_VF_MEM:
1507 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1509 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1510 FS_PRPVF_DEST_SEL_OFFSET;
1513 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1515 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1516 FS_PP_DEST_SEL_OFFSET;
1518 case MEM_ROT_PP_MEM:
1519 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1521 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1522 FS_PP_ROT_DEST_SEL_OFFSET;
1524 case MEM_PRP_ENC_MEM:
1525 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1527 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1528 FS_PRPENC_DEST_SEL_OFFSET;
1530 case MEM_ROT_ENC_MEM:
1531 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1533 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1534 FS_PRPENC_ROT_DEST_SEL_OFFSET;
1536 case MEM_PRP_VF_MEM:
1537 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1539 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1540 FS_PRPVF_DEST_SEL_OFFSET;
1542 case MEM_VDI_PRP_VF_MEM:
1543 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1545 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1546 FS_PRPVF_DEST_SEL_OFFSET;
1548 case MEM_ROT_VF_MEM:
1549 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
1551 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1552 FS_PRPVF_ROT_DEST_SEL_OFFSET;
/*
 * Second stage (switch on dest_ch, elided here): program the
 * SRC_SEL field that belongs to the destination channel so it takes
 * its input from src_ch.  Processing-task sources live in
 * PROC_FLOW1, display sources (DC/DP sync and async flows) in
 * DISP_FLOW1; proc_src_sel[]/disp_src_sel[] map the source channel
 * ID to the hardware encoding.
 */
1561 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1563 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PP_SRC_SEL_OFFSET;
1565 case MEM_ROT_PP_MEM:
1566 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1568 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1569 FS_PP_ROT_SRC_SEL_OFFSET;
1571 case MEM_PRP_ENC_MEM:
1572 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1574 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1576 case MEM_ROT_ENC_MEM:
1577 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1579 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1580 FS_PRPENC_ROT_SRC_SEL_OFFSET;
1582 case MEM_PRP_VF_MEM:
1583 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1585 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1587 case MEM_VDI_PRP_VF_MEM:
1588 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1590 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1592 case MEM_ROT_VF_MEM:
1593 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1595 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1596 FS_PRPVF_ROT_SRC_SEL_OFFSET;
1599 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1601 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC1_SRC_SEL_OFFSET;
1604 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1606 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1607 FS_DP_SYNC0_SRC_SEL_OFFSET;
1610 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1612 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1613 FS_DP_SYNC1_SRC_SEL_OFFSET;
1616 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1618 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC2_SRC_SEL_OFFSET;
1621 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1623 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1624 FS_DP_ASYNC0_SRC_SEL_OFFSET;
1627 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
1629 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1630 FS_DP_ASYNC1_SRC_SEL_OFFSET;
/* Commit the modified routing in one pass. */
1637 __raw_writel(fs_proc_flow1, IPU_FS_PROC_FLOW1);
1638 __raw_writel(fs_proc_flow2, IPU_FS_PROC_FLOW2);
1639 __raw_writel(fs_proc_flow3, IPU_FS_PROC_FLOW3);
1640 __raw_writel(fs_disp_flow1, IPU_FS_DISP_FLOW1);
1643 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1646 EXPORT_SYMBOL(ipu_link_channels);
1649 * This function unlinks 2 channels and disables automatic frame
 * synchronization between them.
1652 * @param src_ch Input parameter for the logical channel ID of
1653 * the source channel.
1655 * @param dest_ch Input parameter for the logical channel ID of
1656 * the destination channel.
1658 * @return This function returns 0 on success or negative error code on
1661 int32_t ipu_unlink_channels(ipu_channel_t src_ch, ipu_channel_t dest_ch)
1664 unsigned long lock_flags;
1665 uint32_t fs_proc_flow1;
1666 uint32_t fs_proc_flow2;
1667 uint32_t fs_proc_flow3;
1668 uint32_t fs_disp_flow1;
1670 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Snapshot all four FSU flow registers; they are rewritten at the end. */
1672 fs_proc_flow1 = __raw_readl(IPU_FS_PROC_FLOW1);
1673 fs_proc_flow2 = __raw_readl(IPU_FS_PROC_FLOW2);
1674 fs_proc_flow3 = __raw_readl(IPU_FS_PROC_FLOW3);
1675 fs_disp_flow1 = __raw_readl(IPU_FS_DISP_FLOW1);
/*
 * First stage (switch on src_ch, elided here): clear the source
 * channel's DEST_SEL field, i.e. the inverse of the programming
 * done in ipu_link_channels().
 */
1679 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1682 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1685 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1688 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1690 case CSI_PRP_ENC_MEM:
1691 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1693 case CSI_PRP_VF_MEM:
1694 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1697 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1699 case MEM_ROT_PP_MEM:
1700 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1702 case MEM_PRP_ENC_MEM:
1703 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1705 case MEM_ROT_ENC_MEM:
1706 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1708 case MEM_PRP_VF_MEM:
1709 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1711 case MEM_VDI_PRP_VF_MEM:
1712 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1714 case MEM_ROT_VF_MEM:
1715 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
/*
 * Second stage (switch on dest_ch, elided here): clear the
 * destination channel's SRC_SEL field so it no longer takes input
 * from any linked channel.
 */
1724 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1726 case MEM_ROT_PP_MEM:
1727 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1729 case MEM_PRP_ENC_MEM:
1730 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1732 case MEM_ROT_ENC_MEM:
1733 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1735 case MEM_PRP_VF_MEM:
1736 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1738 case MEM_VDI_PRP_VF_MEM:
1739 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1741 case MEM_ROT_VF_MEM:
1742 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1745 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1748 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1751 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1754 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1757 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1760 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
/* Commit the cleared routing in one pass. */
1767 __raw_writel(fs_proc_flow1, IPU_FS_PROC_FLOW1);
1768 __raw_writel(fs_proc_flow2, IPU_FS_PROC_FLOW2);
1769 __raw_writel(fs_proc_flow3, IPU_FS_PROC_FLOW3);
1770 __raw_writel(fs_disp_flow1, IPU_FS_DISP_FLOW1);
1773 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1776 EXPORT_SYMBOL(ipu_unlink_channels);
1779 * This function checks whether a logical channel is currently enabled.
1781 * @param channel Input parameter for the logical channel ID.
1783 * @return This function returns 1 while request channel is enabled or
1784 * 0 for not enabled.
1786 int32_t ipu_is_channel_busy(ipu_channel_t channel)
/* Resolve the channel's input and output IDMAC channel numbers. */
1792 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
1793 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Busy if either IDMAC channel is enabled in IDMAC_CHA_EN. */
1795 reg = __raw_readl(IDMAC_CHA_EN(in_dma));
1796 if (reg & idma_mask(in_dma))
1798 reg = __raw_readl(IDMAC_CHA_EN(out_dma));
1799 if (reg & idma_mask(out_dma))
1803 EXPORT_SYMBOL(ipu_is_channel_busy);
1806 * This function enables a logical channel.
1808 * @param channel Input parameter for the logical channel ID.
1810 * @return This function returns 0 on success or negative error code on
1813 int32_t ipu_enable_channel(ipu_channel_t channel)
1816 unsigned long lock_flags;
1823 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Double enable is an error: warn and return without touching hw. */
1825 if (g_channel_enable_mask & (1L << IPU_CHAN_ID(channel))) {
1826 dev_err(g_ipu_dev, "Warning: channel already enabled %d\n",
1827 IPU_CHAN_ID(channel));
1828 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1832 /* Get input and output dma channels */
1833 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
1834 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/*
 * Rebuild IPU_CONF module-enable bits from the per-module use
 * counts so every submodule that currently has users is switched on.
 */
1836 ipu_conf = __raw_readl(IPU_CONF);
1837 if (ipu_di_use_count[0] > 0) {
1838 ipu_conf |= IPU_CONF_DI0_EN;
1840 if (ipu_di_use_count[1] > 0) {
1841 ipu_conf |= IPU_CONF_DI1_EN;
1843 if (ipu_dp_use_count > 0)
1844 ipu_conf |= IPU_CONF_DP_EN;
1845 if (ipu_dc_use_count > 0)
1846 ipu_conf |= IPU_CONF_DC_EN;
1847 if (ipu_dmfc_use_count > 0)
1848 ipu_conf |= IPU_CONF_DMFC_EN;
1849 if (ipu_ic_use_count > 0)
1850 ipu_conf |= IPU_CONF_IC_EN;
/* VDI additionally needs ISP enabled and the IC input routed to it. */
1851 if (ipu_vdi_use_count > 0) {
1852 ipu_conf |= IPU_CONF_ISP_EN;
1853 ipu_conf |= IPU_CONF_VDI_EN;
1854 ipu_conf |= IPU_CONF_IC_INPUT;
1856 if (ipu_rot_use_count > 0)
1857 ipu_conf |= IPU_CONF_ROT_EN;
1858 if (ipu_smfc_use_count > 0)
1859 ipu_conf |= IPU_CONF_SMFC_EN;
1860 __raw_writel(ipu_conf, IPU_CONF);
/* Enable the main input/output IDMAC channels where they exist. */
1862 if (idma_is_valid(in_dma)) {
1863 reg = __raw_readl(IDMAC_CHA_EN(in_dma));
1864 __raw_writel(reg | idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
1866 if (idma_is_valid(out_dma)) {
1867 reg = __raw_readl(IDMAC_CHA_EN(out_dma));
1868 __raw_writel(reg | idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
/* Secondary (graphics overlay) input for IC combining channels. */
1871 if ((g_sec_chan_en[IPU_CHAN_ID(channel)]) &&
1872 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM) ||
1873 (channel == MEM_VDI_PRP_VF_MEM))) {
1874 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
1875 reg = __raw_readl(IDMAC_CHA_EN(sec_dma));
1876 __raw_writel(reg | idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
/*
 * Third (separate alpha) input.  For IC channels the separate-alpha
 * flag is set on the graphics channel; for DP sync channels it is
 * set on the video input channel.
 */
1878 if ((g_thrd_chan_en[IPU_CHAN_ID(channel)]) &&
1879 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM))) {
1880 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
1881 reg = __raw_readl(IDMAC_CHA_EN(thrd_dma));
1882 __raw_writel(reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
1884 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
1885 reg = __raw_readl(IDMAC_SEP_ALPHA);
1886 __raw_writel(reg | idma_mask(sec_dma), IDMAC_SEP_ALPHA);
1887 } else if ((g_thrd_chan_en[IPU_CHAN_ID(channel)]) &&
1888 ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC))) {
1889 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
1890 reg = __raw_readl(IDMAC_CHA_EN(thrd_dma));
1891 __raw_writel(reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
1892 reg = __raw_readl(IDMAC_SEP_ALPHA);
1893 __raw_writel(reg | idma_mask(in_dma), IDMAC_SEP_ALPHA);
/* Display channels: arm the watermark and kick the DP/DC path. */
1896 if ((channel == MEM_DC_SYNC) || (channel == MEM_BG_SYNC) ||
1897 (channel == MEM_FG_SYNC)) {
1898 reg = __raw_readl(IDMAC_WM_EN(in_dma));
1899 __raw_writel(reg | idma_mask(in_dma), IDMAC_WM_EN(in_dma));
1901 _ipu_dp_dc_enable(channel);
/* IC/IRT-backed channels also need their processing task enabled. */
1904 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
1905 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma))
1906 _ipu_ic_enable_task(channel);
1908 g_channel_enable_mask |= 1L << IPU_CHAN_ID(channel);
1910 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1914 EXPORT_SYMBOL(ipu_enable_channel);
1917 * This function checks whether a buffer is marked ready for a logical channel.
1919 * @param channel Input parameter for the logical channel ID.
1921 * @param type Input parameter which buffer to clear.
1923 * @param bufNum Input parameter for which buffer number clear
1927 int32_t ipu_check_buffer_ready(ipu_channel_t channel, ipu_buffer_t type,
1930 uint32_t dma_chan = channel_2_dma(channel, type);
1933 if (dma_chan == IDMA_CHAN_INVALID)
/* Read the ready register corresponding to bufNum (0, 1, or third). */
1937 reg = __raw_readl(IPU_CHA_BUF0_RDY(dma_chan));
1938 else if (bufNum == 1)
1939 reg = __raw_readl(IPU_CHA_BUF1_RDY(dma_chan));
1941 reg = __raw_readl(IPU_CHA_BUF2_RDY(dma_chan));
/* Nonzero channel bit means the buffer is still owned by the IDMAC. */
1943 if (reg & idma_mask(dma_chan))
1948 EXPORT_SYMBOL(ipu_check_buffer_ready);
1951 * This function clears the buffer-ready flag of a buffer for a logical channel.
1953 * @param channel Input parameter for the logical channel ID.
1955 * @param type Input parameter which buffer to clear.
1957 * @param bufNum Input parameter for which buffer number clear
1961 void ipu_clear_buffer_ready(ipu_channel_t channel, ipu_buffer_t type,
1964 unsigned long lock_flags;
1965 uint32_t dma_ch = channel_2_dma(channel, type);
1967 if (!idma_is_valid(dma_ch))
1970 spin_lock_irqsave(&ipu_lock, lock_flags);
/*
 * NOTE(review): IPU_GPR appears to switch the BUFn_RDY registers
 * into write-one-to-CLEAR mode while 0xF0300000 is written, and back
 * to write-one-to-SET mode when 0x0 is restored below -- confirm
 * against the IPUv3 reference manual before relying on this.
 */
1971 __raw_writel(0xF0300000, IPU_GPR); /* write one to clear */
1973 if (idma_is_set(IPU_CHA_BUF0_RDY, dma_ch)) {
1974 __raw_writel(idma_mask(dma_ch),
1975 IPU_CHA_BUF0_RDY(dma_ch));
1977 } else if (bufNum == 1) {
1978 if (idma_is_set(IPU_CHA_BUF1_RDY, dma_ch)) {
1979 __raw_writel(idma_mask(dma_ch),
1980 IPU_CHA_BUF1_RDY(dma_ch));
1983 if (idma_is_set(IPU_CHA_BUF2_RDY, dma_ch)) {
1984 __raw_writel(idma_mask(dma_ch),
1985 IPU_CHA_BUF2_RDY(dma_ch));
1988 __raw_writel(0x0, IPU_GPR); /* write one to set */
1989 spin_unlock_irqrestore(&ipu_lock, lock_flags);
1991 EXPORT_SYMBOL(ipu_clear_buffer_ready);
/*
 * IRQ handler installed temporarily by ipu_disable_channel() while it
 * waits for a busy DMA channel to reach end-of-frame.  dev_id carries
 * the on-stack completion; the (elided) body presumably signals it so
 * the waiter in ipu_disable_channel() wakes up -- TODO confirm.
 */
1993 static irqreturn_t disable_chan_irq_handler(int irq, void *dev_id)
1995 struct completion *comp = dev_id;
2002 * This function disables a logical channel.
2004 * @param channel Input parameter for the logical channel ID.
2006 * @param wait_for_stop Flag to set whether to wait for channel end
2007 * of frame or return immediately.
2009 * @return This function returns 0 on success or negative error code on
2012 int32_t ipu_disable_channel(ipu_channel_t channel, bool wait_for_stop)
2015 unsigned long lock_flags;
2018 uint32_t sec_dma = NO_DMA;
2019 uint32_t thrd_dma = NO_DMA;
2021 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Double disable is an error: report and return. */
2023 if ((g_channel_enable_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
2024 dev_err(g_ipu_dev, "Channel already disabled %d\n",
2025 IPU_CHAN_ID(channel));
2026 spin_unlock_irqrestore(&ipu_lock, lock_flags);
/* Lock is dropped here: the wait loops below may sleep. */
2030 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2032 /* Get input and output dma channels */
2033 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
2034 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Nothing to do when both IDMAC channels are already disabled. */
2036 if ((idma_is_valid(in_dma) &&
2037 !idma_is_set(IDMAC_CHA_EN, in_dma))
2038 && (idma_is_valid(out_dma) &&
2039 !idma_is_set(IDMAC_CHA_EN, out_dma)))
/* Resolve optional secondary (graphics) and third (alpha) channels. */
2042 if (g_sec_chan_en[IPU_CHAN_ID(channel)])
2043 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2044 if (g_thrd_chan_en[IPU_CHAN_ID(channel)]) {
2045 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2046 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
/* Display channels are quiesced through the DP/DC path first. */
2049 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2050 (channel == MEM_DC_SYNC)) {
2051 if (channel == MEM_FG_SYNC)
2052 ipu_disp_set_window_pos(channel, 0, 0);
2054 _ipu_dp_dc_disable(channel, false);
2057 * wait for BG channel EOF then disable FG-IDMAC,
2058 * it avoid FG NFB4EOF error.
2060 if (channel == MEM_FG_SYNC) {
/* Clear, then poll the BG EOF status bit (bounded by a timeout). */
2063 __raw_writel(IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF),
2064 IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF));
2065 while ((__raw_readl(IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF)) &
2066 IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF)) == 0) {
2070 dev_err(g_ipu_dev, "warning: wait for bg sync eof timeout\n");
/*
 * Non-display channels: when asked to wait, block on an EOF
 * interrupt (via a temporary handler + completion) for every DMA
 * channel that is still busy, with a 200 ms timeout per channel.
 */
2075 } else if (wait_for_stop) {
2076 while (idma_is_set(IDMAC_CHA_BUSY, in_dma) ||
2077 idma_is_set(IDMAC_CHA_BUSY, out_dma) ||
2078 (g_sec_chan_en[IPU_CHAN_ID(channel)] &&
2079 idma_is_set(IDMAC_CHA_BUSY, sec_dma)) ||
2080 (g_thrd_chan_en[IPU_CHAN_ID(channel)] &&
2081 idma_is_set(IDMAC_CHA_BUSY, thrd_dma))) {
2082 uint32_t ret, irq = 0xffffffff;
2083 DECLARE_COMPLETION_ONSTACK(disable_comp);
/* Pick the EOF irq of whichever channel is still busy (elided). */
2085 if (idma_is_set(IDMAC_CHA_BUSY, out_dma))
2087 if (g_sec_chan_en[IPU_CHAN_ID(channel)] &&
2088 idma_is_set(IDMAC_CHA_BUSY, sec_dma))
2090 if (g_thrd_chan_en[IPU_CHAN_ID(channel)] &&
2091 idma_is_set(IDMAC_CHA_BUSY, thrd_dma))
2093 if (idma_is_set(IDMAC_CHA_BUSY, in_dma))
2096 if (irq == 0xffffffff) {
2097 dev_err(g_ipu_dev, "warning: no channel busy, break\n");
2100 ret = ipu_request_irq(irq, disable_chan_irq_handler, 0, NULL, &disable_comp);
2102 dev_err(g_ipu_dev, "irq %d in use\n", irq);
2105 ret = wait_for_completion_timeout(&disable_comp, msecs_to_jiffies(200));
2106 ipu_free_irq(irq, &disable_comp);
/* Timeout: dump state and force the disable anyway. */
2108 ipu_dump_registers();
2109 dev_err(g_ipu_dev, "warning: disable ipu dma channel %d during its busy state\n", irq);
/* Re-take the lock for the register tear-down below. */
2116 spin_lock_irqsave(&ipu_lock, lock_flags);
2118 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2119 (channel == MEM_DC_SYNC)) {
2120 reg = __raw_readl(IDMAC_WM_EN(in_dma));
2121 __raw_writel(reg & ~idma_mask(in_dma), IDMAC_WM_EN(in_dma));
2124 /* Disable IC task */
2125 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
2126 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma))
2127 _ipu_ic_disable_task(channel);
2129 /* Disable DMA channel(s) */
2130 if (idma_is_valid(in_dma)) {
2131 reg = __raw_readl(IDMAC_CHA_EN(in_dma));
2132 __raw_writel(reg & ~idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
/* Writing the mask resets the current-buffer pointers to buffer 0. */
2133 __raw_writel(idma_mask(in_dma), IPU_CHA_CUR_BUF(in_dma));
2134 __raw_writel(tri_cur_buf_mask(in_dma),
2135 IPU_CHA_TRIPLE_CUR_BUF(in_dma));
2137 if (idma_is_valid(out_dma)) {
2138 reg = __raw_readl(IDMAC_CHA_EN(out_dma));
2139 __raw_writel(reg & ~idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
2140 __raw_writel(idma_mask(out_dma), IPU_CHA_CUR_BUF(out_dma));
2141 __raw_writel(tri_cur_buf_mask(out_dma),
2142 IPU_CHA_TRIPLE_CUR_BUF(out_dma));
2144 if (g_sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2145 reg = __raw_readl(IDMAC_CHA_EN(sec_dma));
2146 __raw_writel(reg & ~idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
2147 __raw_writel(idma_mask(sec_dma), IPU_CHA_CUR_BUF(sec_dma));
2149 if (g_thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2150 reg = __raw_readl(IDMAC_CHA_EN(thrd_dma));
2151 __raw_writel(reg & ~idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
/* Undo the separate-alpha flag set in ipu_enable_channel(). */
2152 if (channel == MEM_BG_SYNC || channel == MEM_FG_SYNC) {
2153 reg = __raw_readl(IDMAC_SEP_ALPHA);
2154 __raw_writel(reg & ~idma_mask(in_dma), IDMAC_SEP_ALPHA);
2156 reg = __raw_readl(IDMAC_SEP_ALPHA);
2157 __raw_writel(reg & ~idma_mask(sec_dma), IDMAC_SEP_ALPHA);
2159 __raw_writel(idma_mask(thrd_dma), IPU_CHA_CUR_BUF(thrd_dma));
2162 g_channel_enable_mask &= ~(1L << IPU_CHAN_ID(channel));
2164 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2166 /* Set channel buffers NOT to be ready */
2167 if (idma_is_valid(in_dma)) {
2168 ipu_clear_buffer_ready(channel, IPU_VIDEO_IN_BUFFER, 0);
2169 ipu_clear_buffer_ready(channel, IPU_VIDEO_IN_BUFFER, 1);
2170 ipu_clear_buffer_ready(channel, IPU_VIDEO_IN_BUFFER, 2);
2172 if (idma_is_valid(out_dma)) {
2173 ipu_clear_buffer_ready(channel, IPU_OUTPUT_BUFFER, 0);
2174 ipu_clear_buffer_ready(channel, IPU_OUTPUT_BUFFER, 1);
2176 if (g_sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2177 ipu_clear_buffer_ready(channel, IPU_GRAPH_IN_BUFFER, 0);
2178 ipu_clear_buffer_ready(channel, IPU_GRAPH_IN_BUFFER, 1);
2180 if (g_thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2181 ipu_clear_buffer_ready(channel, IPU_ALPHA_IN_BUFFER, 0);
2182 ipu_clear_buffer_ready(channel, IPU_ALPHA_IN_BUFFER, 1);
2187 EXPORT_SYMBOL(ipu_disable_channel);
2190 * This function enables CSI.
2192 * @param csi csi num 0 or 1
2194 * @return This function returns 0 on success or negative error code on
2197 int32_t ipu_enable_csi(uint32_t csi)
2200 unsigned long lock_flags;
/* Only CSI 0 and 1 exist on this hardware. */
2203 dev_err(g_ipu_dev, "Wrong csi num_%d\n", csi);
2207 spin_lock_irqsave(&ipu_lock, lock_flags);
2208 ipu_csi_use_count[csi]++;
/* First user switches the CSI module on in IPU_CONF. */
2210 if (ipu_csi_use_count[csi] == 1) {
2211 reg = __raw_readl(IPU_CONF);
2213 __raw_writel(reg | IPU_CONF_CSI0_EN, IPU_CONF);
2215 __raw_writel(reg | IPU_CONF_CSI1_EN, IPU_CONF);
2217 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2220 EXPORT_SYMBOL(ipu_enable_csi);
2223 * This function disables CSI.
2225 * @param csi csi num 0 or 1
2227 * @return This function returns 0 on success or negative error code on
2230 int32_t ipu_disable_csi(uint32_t csi)
2233 unsigned long lock_flags;
/* Only CSI 0 and 1 exist on this hardware. */
2236 dev_err(g_ipu_dev, "Wrong csi num_%d\n", csi);
2240 spin_lock_irqsave(&ipu_lock, lock_flags);
2241 ipu_csi_use_count[csi]--;
/* Last user switches the CSI module off in IPU_CONF. */
2243 if (ipu_csi_use_count[csi] == 0) {
2244 reg = __raw_readl(IPU_CONF);
2246 __raw_writel(reg & ~IPU_CONF_CSI0_EN, IPU_CONF);
2248 __raw_writel(reg & ~IPU_CONF_CSI1_EN, IPU_CONF);
2250 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2253 EXPORT_SYMBOL(ipu_disable_csi);
/*
 * Top-level IPU interrupt dispatcher.  Scans the error-status
 * registers first, then the functional-interrupt registers, and
 * demultiplexes each pending line to the handler registered in
 * ipu_irq_list[] via ipu_request_irq().
 */
2255 static irqreturn_t ipu_irq_handler(int irq, void *desc)
2259 irqreturn_t result = IRQ_NONE;
/* Zero terminates each register-index list. */
2261 const int err_reg[] = { 5, 6, 9, 10, 0 };
2262 const int int_reg[] = { 1, 2, 3, 4, 11, 12, 13, 14, 15, 0 };
/* Pass 1: error interrupts -- log, acknowledge, and mask them. */
2265 if (err_reg[i] == 0)
2267 int_stat = __raw_readl(IPU_INT_STAT(err_reg[i]));
2268 int_stat &= __raw_readl(IPU_INT_CTRL(err_reg[i]));
2270 __raw_writel(int_stat, IPU_INT_STAT(err_reg[i]));
2272 "IPU Error - IPU_INT_STAT_%d = 0x%08X\n",
2273 err_reg[i], int_stat);
2274 /* Disable interrupts so we only get error once */
2276 __raw_readl(IPU_INT_CTRL(err_reg[i])) & ~int_stat;
2277 __raw_writel(int_stat, IPU_INT_CTRL(err_reg[i]));
/* Pass 2: functional interrupts -- ack, then call each registered
 * handler for every set bit (ffs() gives a 1-based bit index). */
2282 if (int_reg[i] == 0)
2284 int_stat = __raw_readl(IPU_INT_STAT(int_reg[i]));
2285 int_stat &= __raw_readl(IPU_INT_CTRL(int_reg[i]));
2286 __raw_writel(int_stat, IPU_INT_STAT(int_reg[i]));
2287 while ((line = ffs(int_stat)) != 0) {
2289 int_stat &= ~(1UL << line);
/* Convert register-local bit position to a global IPU irq line. */
2290 line += (int_reg[i] - 1) * 32;
2292 ipu_irq_list[line].handler(line,
2302 * This function enables the interrupt for the specified interrupt line.
2303 * The interrupt lines are defined in \b ipu_irq_line enum.
2305 * @param irq Interrupt line to enable interrupt for.
2308 void ipu_enable_irq(uint32_t irq)
2311 unsigned long lock_flags;
/* IPU registers need the IPU clock running; gate it on if it isn't. */
2313 if (!g_ipu_clk_enabled)
2314 clk_enable(g_ipu_clk);
2316 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Set this line's bit in its interrupt-control (mask) register. */
2318 reg = __raw_readl(IPUIRQ_2_CTRLREG(irq));
2319 reg |= IPUIRQ_2_MASK(irq);
2320 __raw_writel(reg, IPUIRQ_2_CTRLREG(irq));
2322 spin_unlock_irqrestore(&ipu_lock, lock_flags);
/* Restore the clock to its prior (gated-off) state. */
2323 if (!g_ipu_clk_enabled)
2324 clk_disable(g_ipu_clk);
2326 EXPORT_SYMBOL(ipu_enable_irq);
2329 * This function disables the interrupt for the specified interrupt line.
2330 * The interrupt lines are defined in \b ipu_irq_line enum.
2332 * @param irq Interrupt line to disable interrupt for.
2335 void ipu_disable_irq(uint32_t irq)
2338 unsigned long lock_flags;
/* IPU registers need the IPU clock running; gate it on if it isn't. */
2340 if (!g_ipu_clk_enabled)
2341 clk_enable(g_ipu_clk);
2342 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Clear this line's bit in its interrupt-control (mask) register. */
2344 reg = __raw_readl(IPUIRQ_2_CTRLREG(irq));
2345 reg &= ~IPUIRQ_2_MASK(irq);
2346 __raw_writel(reg, IPUIRQ_2_CTRLREG(irq));
2348 spin_unlock_irqrestore(&ipu_lock, lock_flags);
/* Restore the clock to its prior (gated-off) state. */
2349 if (!g_ipu_clk_enabled)
2350 clk_disable(g_ipu_clk);
2352 EXPORT_SYMBOL(ipu_disable_irq);
2355 * This function clears the interrupt for the specified interrupt line.
2356 * The interrupt lines are defined in \b ipu_irq_line enum.
2358 * @param irq Interrupt line to clear interrupt for.
2361 void ipu_clear_irq(uint32_t irq)
/* IPU registers need the IPU clock running; gate it on if it isn't. */
2363 if (!g_ipu_clk_enabled)
2364 clk_enable(g_ipu_clk)
;
/* Status registers are write-one-to-clear. */
2366 __raw_writel(IPUIRQ_2_MASK(irq), IPUIRQ_2_STATREG(irq));
2368 if (!g_ipu_clk_enabled)
2369 clk_disable(g_ipu_clk);
2371 EXPORT_SYMBOL(ipu_clear_irq);
2374 * This function returns the current interrupt status for the specified
2375 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2377 * @param irq Interrupt line to get status for.
2379 * @return Returns true if the interrupt is pending/asserted or false if
2380 * the interrupt is not pending.
2382 bool ipu_get_irq_status(uint32_t irq)
/* IPU registers need the IPU clock running; gate it on if it isn't. */
2386 if (!g_ipu_clk_enabled)
2387 clk_enable(g_ipu_clk);
2389 reg = __raw_readl(IPUIRQ_2_STATREG(irq));
2391 if (!g_ipu_clk_enabled)
2392 clk_disable(g_ipu_clk);
/* True when the line's status bit is asserted. */
2394 if (reg & IPUIRQ_2_MASK(irq))
2399 EXPORT_SYMBOL(ipu_get_irq_status);
2402 * This function registers an interrupt handler function for the specified
2403 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2405 * @param irq Interrupt line to get status for.
2407 * @param handler Input parameter for address of the handler
2410 * @param irq_flags Flags for interrupt mode. Currently not used.
2412 * @param devname Input parameter for string name of driver
2413 * registering the handler.
2415 * @param dev_id Input parameter for pointer of data to be
2416 * passed to the handler.
2418 * @return This function returns 0 on success or negative error code on
2421 int ipu_request_irq(uint32_t irq,
2422 irqreturn_t(*handler) (int, void *),
2423 uint32_t irq_flags, const char *devname, void *dev_id)
2425 unsigned long lock_flags;
/* irq is an IPU-internal line index, not a system IRQ number. */
2427 BUG_ON(irq >= IPU_IRQ_COUNT)
;
2429 spin_lock_irqsave(&ipu_lock, lock_flags);
/* Each IPU line supports exactly one handler; no sharing. */
2431 if (ipu_irq_list[irq].handler != NULL) {
2433 "handler already installed on irq %d\n", irq);
2434 spin_unlock_irqrestore(&ipu_lock, lock_flags);
/* Record the handler before unmasking the line. */
2438 ipu_irq_list[irq].handler = handler;
2439 ipu_irq_list[irq].flags = irq_flags;
2440 ipu_irq_list[irq].dev_id = dev_id;
2441 ipu_irq_list[irq].name = devname;
2443 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2445 ipu_enable_irq(irq); /* enable the interrupt */
2449 EXPORT_SYMBOL(ipu_request_irq);
2452 * This function unregisters an interrupt handler for the specified interrupt
2453 * line. The interrupt lines are defined in \b ipu_irq_line enum.
2455 * @param irq Interrupt line to get status for.
2457 * @param dev_id Input parameter for pointer of data to be passed
2458 * to the handler. This must match value passed to
2459 * ipu_request_irq().
2462 void ipu_free_irq(uint32_t irq, void *dev_id)
2464 ipu_disable_irq(irq); /* disable the interrupt */
/* Only unregister when dev_id matches the value passed to
 * ipu_request_irq(), so a stale caller cannot free someone
 * else's handler. */
2466 if (ipu_irq_list[irq].dev_id == dev_id)
2467 ipu_irq_list[irq].handler = NULL;
2469 EXPORT_SYMBOL(ipu_free_irq);
2471 uint32_t ipu_get_cur_buffer_idx(ipu_channel_t channel, ipu_buffer_t type)
2473 uint32_t reg, dma_chan;
2475 dma_chan = channel_2_dma(channel, type);
2476 if (!idma_is_valid(dma_chan))
2479 reg = __raw_readl(IPU_CHA_TRB_MODE_SEL(dma_chan));
2480 if ((reg & idma_mask(dma_chan)) && _ipu_is_trb_chan(dma_chan)) {
2481 reg = __raw_readl(IPU_CHA_TRIPLE_CUR_BUF(dma_chan));
2482 return (reg & tri_cur_buf_mask(dma_chan)) >>
2483 tri_cur_buf_shift(dma_chan);
2485 reg = __raw_readl(IPU_CHA_CUR_BUF(dma_chan));
2486 if (reg & idma_mask(dma_chan))
2492 EXPORT_SYMBOL(ipu_get_cur_buffer_idx);
2494 uint32_t _ipu_channel_status(ipu_channel_t channel)
2497 uint32_t task_stat_reg = __raw_readl(IPU_PROC_TASK_STAT);
2500 case MEM_PRP_VF_MEM:
2501 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
2503 case MEM_VDI_PRP_VF_MEM:
2504 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
2506 case MEM_ROT_VF_MEM:
2508 (task_stat_reg & TSTAT_VF_ROT_MASK) >> TSTAT_VF_ROT_OFFSET;
2510 case MEM_PRP_ENC_MEM:
2511 stat = (task_stat_reg & TSTAT_ENC_MASK) >> TSTAT_ENC_OFFSET;
2513 case MEM_ROT_ENC_MEM:
2515 (task_stat_reg & TSTAT_ENC_ROT_MASK) >>
2516 TSTAT_ENC_ROT_OFFSET;
2519 stat = (task_stat_reg & TSTAT_PP_MASK) >> TSTAT_PP_OFFSET;
2521 case MEM_ROT_PP_MEM:
2523 (task_stat_reg & TSTAT_PP_ROT_MASK) >> TSTAT_PP_ROT_OFFSET;
2527 stat = TASK_STAT_IDLE;
2533 int32_t ipu_swap_channel(ipu_channel_t from_ch, ipu_channel_t to_ch)
2536 unsigned long lock_flags;
2538 int from_dma = channel_2_dma(from_ch, IPU_INPUT_BUFFER);
2539 int to_dma = channel_2_dma(to_ch, IPU_INPUT_BUFFER);
2541 /* enable target channel */
2542 spin_lock_irqsave(&ipu_lock, lock_flags);
2544 reg = __raw_readl(IDMAC_CHA_EN(to_dma));
2545 __raw_writel(reg | idma_mask(to_dma), IDMAC_CHA_EN(to_dma));
2547 g_channel_enable_mask |= 1L << IPU_CHAN_ID(to_ch);
2549 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2552 _ipu_dp_dc_disable(from_ch, true);
2554 /* disable source channel */
2555 spin_lock_irqsave(&ipu_lock, lock_flags);
2557 reg = __raw_readl(IDMAC_CHA_EN(from_dma));
2558 __raw_writel(reg & ~idma_mask(from_dma), IDMAC_CHA_EN(from_dma));
2559 __raw_writel(idma_mask(from_dma), IPU_CHA_CUR_BUF(from_dma));
2560 __raw_writel(tri_cur_buf_mask(from_dma),
2561 IPU_CHA_TRIPLE_CUR_BUF(from_dma));
2563 g_channel_enable_mask &= ~(1L << IPU_CHAN_ID(from_ch));
2565 spin_unlock_irqrestore(&ipu_lock, lock_flags);
2567 ipu_clear_buffer_ready(from_ch, IPU_VIDEO_IN_BUFFER, 0);
2568 ipu_clear_buffer_ready(from_ch, IPU_VIDEO_IN_BUFFER, 1);
2569 ipu_clear_buffer_ready(from_ch, IPU_VIDEO_IN_BUFFER, 2);
2573 EXPORT_SYMBOL(ipu_swap_channel);
2575 uint32_t bytes_per_pixel(uint32_t fmt)
2578 case IPU_PIX_FMT_GENERIC: /*generic data */
2579 case IPU_PIX_FMT_RGB332:
2580 case IPU_PIX_FMT_YUV420P:
2581 case IPU_PIX_FMT_YVU420P:
2582 case IPU_PIX_FMT_YUV422P:
2585 case IPU_PIX_FMT_RGB565:
2586 case IPU_PIX_FMT_YUYV:
2587 case IPU_PIX_FMT_UYVY:
2590 case IPU_PIX_FMT_BGR24:
2591 case IPU_PIX_FMT_RGB24:
2594 case IPU_PIX_FMT_GENERIC_32: /*generic data */
2595 case IPU_PIX_FMT_BGR32:
2596 case IPU_PIX_FMT_BGRA32:
2597 case IPU_PIX_FMT_RGB32:
2598 case IPU_PIX_FMT_RGBA32:
2599 case IPU_PIX_FMT_ABGR32:
2608 EXPORT_SYMBOL(bytes_per_pixel);
2610 ipu_color_space_t format_to_colorspace(uint32_t fmt)
2613 case IPU_PIX_FMT_RGB666:
2614 case IPU_PIX_FMT_RGB565:
2615 case IPU_PIX_FMT_BGR24:
2616 case IPU_PIX_FMT_RGB24:
2617 case IPU_PIX_FMT_GBR24:
2618 case IPU_PIX_FMT_BGR32:
2619 case IPU_PIX_FMT_BGRA32:
2620 case IPU_PIX_FMT_RGB32:
2621 case IPU_PIX_FMT_RGBA32:
2622 case IPU_PIX_FMT_ABGR32:
2623 case IPU_PIX_FMT_LVDS666:
2624 case IPU_PIX_FMT_LVDS888:
2635 bool ipu_pixel_format_has_alpha(uint32_t fmt)
2638 case IPU_PIX_FMT_RGBA32:
2639 case IPU_PIX_FMT_BGRA32:
2640 case IPU_PIX_FMT_ABGR32:
2650 void ipu_set_csc_coefficients(ipu_channel_t channel, int32_t param[][3])
2652 _ipu_dp_set_csc_coefficients(channel, param);
2654 EXPORT_SYMBOL(ipu_set_csc_coefficients);
2656 static int ipu_suspend(struct platform_device *pdev, pm_message_t state)
2658 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
2660 if (g_ipu_clk_enabled) {
2661 /* save and disable enabled channels*/
2662 idma_enable_reg[0] = __raw_readl(IDMAC_CHA_EN(0));
2663 idma_enable_reg[1] = __raw_readl(IDMAC_CHA_EN(32));
2664 while ((__raw_readl(IDMAC_CHA_BUSY(0)) & idma_enable_reg[0])
2665 || (__raw_readl(IDMAC_CHA_BUSY(32)) &
2666 idma_enable_reg[1])) {
2667 /* disable channel not busy already */
2668 uint32_t chan_should_disable, timeout = 1000, time = 0;
2670 chan_should_disable =
2671 __raw_readl(IDMAC_CHA_BUSY(0))
2672 ^ idma_enable_reg[0];
2673 __raw_writel((~chan_should_disable) &
2674 idma_enable_reg[0], IDMAC_CHA_EN(0));
2675 chan_should_disable =
2676 __raw_readl(IDMAC_CHA_BUSY(1))
2677 ^ idma_enable_reg[1];
2678 __raw_writel((~chan_should_disable) &
2679 idma_enable_reg[1], IDMAC_CHA_EN(32));
2682 if (time >= timeout)
2685 __raw_writel(0, IDMAC_CHA_EN(0));
2686 __raw_writel(0, IDMAC_CHA_EN(32));
2688 /* save double buffer select regs */
2689 ipu_cha_db_mode_reg[0] = __raw_readl(IPU_CHA_DB_MODE_SEL(0));
2690 ipu_cha_db_mode_reg[1] = __raw_readl(IPU_CHA_DB_MODE_SEL(32));
2691 ipu_cha_db_mode_reg[2] =
2692 __raw_readl(IPU_ALT_CHA_DB_MODE_SEL(0));
2693 ipu_cha_db_mode_reg[3] =
2694 __raw_readl(IPU_ALT_CHA_DB_MODE_SEL(32));
2696 /* save triple buffer select regs */
2697 ipu_cha_trb_mode_reg[0] = __raw_readl(IPU_CHA_TRB_MODE_SEL(0));
2698 ipu_cha_trb_mode_reg[1] = __raw_readl(IPU_CHA_TRB_MODE_SEL(32));
2700 /* save current buffer regs */
2701 ipu_cha_cur_buf_reg[0] = __raw_readl(IPU_CHA_CUR_BUF(0));
2702 ipu_cha_cur_buf_reg[1] = __raw_readl(IPU_CHA_CUR_BUF(32));
2703 ipu_cha_cur_buf_reg[2] = __raw_readl(IPU_ALT_CUR_BUF0);
2704 ipu_cha_cur_buf_reg[3] = __raw_readl(IPU_ALT_CUR_BUF1);
2706 /* save current triple buffer regs */
2707 ipu_cha_triple_cur_buf_reg[0] =
2708 __raw_readl(IPU_CHA_TRIPLE_CUR_BUF(0));
2709 ipu_cha_triple_cur_buf_reg[1] =
2710 __raw_readl(IPU_CHA_TRIPLE_CUR_BUF(32));
2711 ipu_cha_triple_cur_buf_reg[2] =
2712 __raw_readl(IPU_CHA_TRIPLE_CUR_BUF(64));
2713 ipu_cha_triple_cur_buf_reg[3] =
2714 __raw_readl(IPU_CHA_TRIPLE_CUR_BUF(96));
2716 /* save idamc sub addr regs */
2717 idma_sub_addr_reg[0] = __raw_readl(IDMAC_SUB_ADDR_0);
2718 idma_sub_addr_reg[1] = __raw_readl(IDMAC_SUB_ADDR_1);
2719 idma_sub_addr_reg[2] = __raw_readl(IDMAC_SUB_ADDR_2);
2720 idma_sub_addr_reg[3] = __raw_readl(IDMAC_SUB_ADDR_3);
2721 idma_sub_addr_reg[4] = __raw_readl(IDMAC_SUB_ADDR_4);
2723 /* save sub-modules status and disable all */
2724 ic_conf_reg = __raw_readl(IC_CONF);
2725 __raw_writel(0, IC_CONF);
2726 ipu_conf_reg = __raw_readl(IPU_CONF);
2727 __raw_writel(0, IPU_CONF);
2729 /* save buf ready regs */
2730 buf_ready_reg[0] = __raw_readl(IPU_CHA_BUF0_RDY(0));
2731 buf_ready_reg[1] = __raw_readl(IPU_CHA_BUF0_RDY(32));
2732 buf_ready_reg[2] = __raw_readl(IPU_CHA_BUF1_RDY(0));
2733 buf_ready_reg[3] = __raw_readl(IPU_CHA_BUF1_RDY(32));
2734 buf_ready_reg[4] = __raw_readl(IPU_ALT_CHA_BUF0_RDY(0));
2735 buf_ready_reg[5] = __raw_readl(IPU_ALT_CHA_BUF0_RDY(32));
2736 buf_ready_reg[6] = __raw_readl(IPU_ALT_CHA_BUF1_RDY(0));
2737 buf_ready_reg[7] = __raw_readl(IPU_ALT_CHA_BUF1_RDY(32));
2738 buf_ready_reg[8] = __raw_readl(IPU_CHA_BUF2_RDY(0));
2739 buf_ready_reg[9] = __raw_readl(IPU_CHA_BUF2_RDY(32));
2748 static int ipu_resume(struct platform_device *pdev)
2750 struct imx_ipuv3_platform_data *plat_data = pdev->dev.platform_data;
2755 if (g_ipu_clk_enabled) {
2757 /* restore buf ready regs */
2758 __raw_writel(buf_ready_reg[0], IPU_CHA_BUF0_RDY(0));
2759 __raw_writel(buf_ready_reg[1], IPU_CHA_BUF0_RDY(32));
2760 __raw_writel(buf_ready_reg[2], IPU_CHA_BUF1_RDY(0));
2761 __raw_writel(buf_ready_reg[3], IPU_CHA_BUF1_RDY(32));
2762 __raw_writel(buf_ready_reg[4], IPU_ALT_CHA_BUF0_RDY(0));
2763 __raw_writel(buf_ready_reg[5], IPU_ALT_CHA_BUF0_RDY(32));
2764 __raw_writel(buf_ready_reg[6], IPU_ALT_CHA_BUF1_RDY(0));
2765 __raw_writel(buf_ready_reg[7], IPU_ALT_CHA_BUF1_RDY(32));
2766 __raw_writel(buf_ready_reg[8], IPU_CHA_BUF2_RDY(0));
2767 __raw_writel(buf_ready_reg[9], IPU_CHA_BUF2_RDY(32));
2769 /* re-enable sub-modules*/
2770 __raw_writel(ipu_conf_reg, IPU_CONF);
2771 __raw_writel(ic_conf_reg, IC_CONF);
2773 /* restore double buffer select regs */
2774 __raw_writel(ipu_cha_db_mode_reg[0], IPU_CHA_DB_MODE_SEL(0));
2775 __raw_writel(ipu_cha_db_mode_reg[1], IPU_CHA_DB_MODE_SEL(32));
2776 __raw_writel(ipu_cha_db_mode_reg[2],
2777 IPU_ALT_CHA_DB_MODE_SEL(0));
2778 __raw_writel(ipu_cha_db_mode_reg[3],
2779 IPU_ALT_CHA_DB_MODE_SEL(32));
2781 /* restore triple buffer select regs */
2782 __raw_writel(ipu_cha_trb_mode_reg[0], IPU_CHA_TRB_MODE_SEL(0));
2783 __raw_writel(ipu_cha_trb_mode_reg[1], IPU_CHA_TRB_MODE_SEL(32));
2785 /* restore current buffer select regs */
2786 __raw_writel(~(ipu_cha_cur_buf_reg[0]), IPU_CHA_CUR_BUF(0));
2787 __raw_writel(~(ipu_cha_cur_buf_reg[1]), IPU_CHA_CUR_BUF(32));
2788 __raw_writel(~(ipu_cha_cur_buf_reg[2]), IPU_ALT_CUR_BUF0);
2789 __raw_writel(~(ipu_cha_cur_buf_reg[3]), IPU_ALT_CUR_BUF1);
2791 /* restore triple current buffer select regs */
2792 __raw_writel(~(ipu_cha_triple_cur_buf_reg[0]),
2793 IPU_CHA_TRIPLE_CUR_BUF(0));
2794 __raw_writel(~(ipu_cha_triple_cur_buf_reg[1]),
2795 IPU_CHA_TRIPLE_CUR_BUF(32));
2796 __raw_writel(~(ipu_cha_triple_cur_buf_reg[2]),
2797 IPU_CHA_TRIPLE_CUR_BUF(64));
2798 __raw_writel(~(ipu_cha_triple_cur_buf_reg[3]),
2799 IPU_CHA_TRIPLE_CUR_BUF(96));
2801 /* restore idamc sub addr regs */
2802 __raw_writel(idma_sub_addr_reg[0], IDMAC_SUB_ADDR_0);
2803 __raw_writel(idma_sub_addr_reg[1], IDMAC_SUB_ADDR_1);
2804 __raw_writel(idma_sub_addr_reg[2], IDMAC_SUB_ADDR_2);
2805 __raw_writel(idma_sub_addr_reg[3], IDMAC_SUB_ADDR_3);
2806 __raw_writel(idma_sub_addr_reg[4], IDMAC_SUB_ADDR_4);
2808 /* restart idma channel*/
2809 __raw_writel(idma_enable_reg[0], IDMAC_CHA_EN(0));
2810 __raw_writel(idma_enable_reg[1], IDMAC_CHA_EN(32));
2812 clk_enable(g_ipu_clk);
2813 _ipu_dmfc_init(dmfc_type_setup, 1);
2814 _ipu_init_dc_mappings();
2816 /* Set sync refresh channels as high priority */
2817 __raw_writel(0x18800001L, IDMAC_CHA_PRI(0));
2818 clk_disable(g_ipu_clk);
2825 * This structure contains pointers to the power management callback functions.
2827 static struct platform_driver mxcipu_driver = {
2829 .name = "imx-ipuv3",
2832 .remove = ipu_remove,
2833 .suspend = ipu_suspend,
2834 .resume = ipu_resume,
2837 int32_t __init ipu_gen_init(void)
2841 ret = platform_driver_register(&mxcipu_driver);
2845 subsys_initcall(ipu_gen_init);
2847 static void __exit ipu_gen_uninit(void)
2849 platform_driver_unregister(&mxcipu_driver);
2852 module_exit(ipu_gen_uninit);