2 * Copyright 2005-2013 Freescale Semiconductor, Inc. All Rights Reserved.
6 * The code contained herein is licensed under the GNU General Public
7 * License. You may obtain a copy of the GNU General Public License
8 * Version 2 or later at the following locations:
10 * http://www.opensource.org/licenses/gpl-license.html
11 * http://www.gnu.org/copyleft/gpl.html
17 * @brief This file contains the IPU driver common API functions.
21 #include <linux/clk.h>
22 #include <linux/clk-provider.h>
23 #include <linux/delay.h>
24 #include <linux/err.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
28 #include <linux/ipu-v3.h>
29 #include <linux/irq.h>
30 #include <linux/irqdesc.h>
31 #include <linux/module.h>
32 #include <linux/mod_devicetable.h>
33 #include <linux/of_device.h>
34 #include <linux/platform_device.h>
35 #include <linux/pm_runtime.h>
36 #include <linux/reset.h>
37 #include <linux/spinlock.h>
38 #include <linux/types.h>
40 #include <asm/cacheflush.h>
42 #include "ipu_param_mem.h"
/* One state object per IPU instance on the SoC; looked up via ipu_get_soc(). */
45 static struct ipu_soc ipu_array[MXC_IPU_MAX_NUM];
48 /* Static functions */
/* Forward declarations for the SYNC/ERR interrupt handlers registered in probe. */
49 static irqreturn_t ipu_sync_irq_handler(int irq, void *desc);
50 static irqreturn_t ipu_err_irq_handler(int irq, void *desc);
52 static inline uint32_t channel_2_dma(ipu_channel_t ch, ipu_buffer_t type)
54 return ((uint32_t) ch >> (6 * type)) & 0x3F;
/*
 * NOTE(review): the tail of this condition is missing from this view of
 * the file; visibly it matches IC channels in the 11..22 range with at
 * least channel 17 excluded — confirm the full exclusion list upstream.
 */
57 static inline int _ipu_is_ic_chan(uint32_t dma_chan)
59 return (((dma_chan >= 11) && (dma_chan <= 22) && (dma_chan != 17) &&
/* The VDI (de-interlacer) output uses IDMAC channel 5. */
static inline int _ipu_is_vdi_out_chan(uint32_t dma_chan)
{
	return dma_chan == 5;
}
/* IDMAC channels 14 and 15 carry the IC graphics-plane input. */
static inline int _ipu_is_ic_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 14:
	case 15:
		return 1;
	default:
		return 0;
	}
}
/* Either DP BG (channel 23) or DP FG (channel 27) can be the graphic window. */
static inline int _ipu_is_dp_graphic_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 23:
	case 27:
		return 1;
	default:
		return 0;
	}
}
/* The IRT (rotator) owns the contiguous IDMAC channel range 45..50. */
static inline int _ipu_is_irt_chan(uint32_t dma_chan)
{
	return dma_chan >= 45 && dma_chan <= 50;
}
/* DMFC-backed display channels occupy the contiguous range 23..29. */
static inline int _ipu_is_dmfc_chan(uint32_t dma_chan)
{
	return dma_chan >= 23 && dma_chan <= 29;
}
/*
 * True when @dma_chan belongs to the SMFC (CSI capture) block, which
 * owns IDMAC channels 0..3.
 *
 * Fix: dropped the tautological "dma_chan >= 0" comparison — dma_chan
 * is unsigned, so that test is always true and only triggers
 * -Wtype-limits warnings. Behavior is unchanged.
 */
static inline int _ipu_is_smfc_chan(uint32_t dma_chan)
{
	return dma_chan <= 3;
}
/*
 * True when @dma_chan supports triple-buffer (TRB) mode: a fixed set of
 * IDMAC channels, and only on IPUv3DEX-or-newer hardware
 * (g_ipu_hw_rev is derived from the devtype during probe).
 */
94 static inline int _ipu_is_trb_chan(uint32_t dma_chan)
96 return (((dma_chan == 8) || (dma_chan == 9) ||
97 (dma_chan == 10) || (dma_chan == 13) ||
98 (dma_chan == 21) || (dma_chan == 23) ||
99 (dma_chan == 27) || (dma_chan == 28)) &&
100 (g_ipu_hw_rev >= IPU_V3DEX));
/*
 * IDMAC channels 23/24/28/41 can each drive a display directly
 * (primary). IDMAC 27 is usually the partial plane and depends on
 * IDMAC 23 (the full plane), so it is nonprimary.
 */
static inline int _ipu_is_primary_disp_chan(uint32_t dma_chan)
{
	switch (dma_chan) {
	case 23:
	case 24:
	case 28:
	case 41:
		return 1;
	default:
		return 0;
	}
}
/*
 * True when @irq lives in one of the SYNC interrupt banks.  Each
 * 32-bit status register covers 32 interrupts; banks 1-4, 7-8 and
 * 11-15 are SYNC registers, the rest belong to the ERR path.
 */
static inline int _ipu_is_sync_irq(uint32_t irq)
{
	/* sync interrupt register number */
	int reg_num = irq / 32 + 1;

	switch (reg_num) {
	case 1:
	case 2:
	case 3:
	case 4:
	case 7:
	case 8:
	case 11:
	case 12:
	case 13:
	case 14:
	case 15:
		return 1;
	default:
		return 0;
	}
}
/*
 * Helpers for addressing per-channel bits in banked 32-bit registers:
 * idma_mask() yields the channel's bit within its 32-channel bank, and
 * reg(dma) in idma_is_set() selects which bank register to read.
 */
126 #define idma_is_valid(ch) (ch != NO_DMA)
127 #define idma_mask(ch) (idma_is_valid(ch) ? (1UL << (ch & 0x1F)) : 0)
128 #define idma_is_set(ipu, reg, dma) (ipu_idmac_read(ipu, reg(dma)) & idma_mask(dma))
/* Triple-buffer state uses two bits per channel — hence ch*2 and the *3 mask. */
129 #define tri_cur_buf_mask(ch) (idma_mask(ch*2) * 3)
130 #define tri_cur_buf_shift(ch) (ffs(idma_mask(ch*2)) - 1)
132 static int ipu_clk_setup_enable(struct ipu_soc *ipu,
133 struct ipu_pltfm_data *pdata)
135 char pixel_clk_0[] = "ipu1_pclk_0";
136 char pixel_clk_1[] = "ipu1_pclk_1";
137 char pixel_clk_0_sel[] = "ipu1_pclk0_sel";
138 char pixel_clk_1_sel[] = "ipu1_pclk1_sel";
139 char pixel_clk_0_div[] = "ipu1_pclk0_div";
140 char pixel_clk_1_div[] = "ipu1_pclk1_div";
141 char *ipu_pixel_clk_sel[] = { "ipu1", "ipu1_di0", "ipu1_di1", };
147 pixel_clk_0[3] += pdata->id;
148 pixel_clk_1[3] += pdata->id;
149 pixel_clk_0_sel[3] += pdata->id;
150 pixel_clk_1_sel[3] += pdata->id;
151 pixel_clk_0_div[3] += pdata->id;
152 pixel_clk_1_div[3] += pdata->id;
153 for (i = 0; i < ARRAY_SIZE(ipu_pixel_clk_sel); i++) {
154 pclk_sel = ipu_pixel_clk_sel[i];
155 pclk_sel[3] += pdata->id;
157 dev_dbg(ipu->dev, "ipu_clk = %lu\n", clk_get_rate(ipu->ipu_clk));
159 clk = clk_register_mux_pix_clk(ipu->dev, pixel_clk_0_sel,
160 (const char **)ipu_pixel_clk_sel,
161 ARRAY_SIZE(ipu_pixel_clk_sel),
164 dev_err(ipu->dev, "clk_register mux di0 failed");
167 ipu->pixel_clk_sel[0] = clk;
168 clk = clk_register_mux_pix_clk(ipu->dev, pixel_clk_1_sel,
169 (const char **)ipu_pixel_clk_sel,
170 ARRAY_SIZE(ipu_pixel_clk_sel),
173 dev_err(ipu->dev, "clk_register mux di1 failed");
176 ipu->pixel_clk_sel[1] = clk;
178 clk = clk_register_div_pix_clk(ipu->dev, pixel_clk_0_div,
179 pixel_clk_0_sel, 0, pdata->id, 0, 0);
181 dev_err(ipu->dev, "clk register di0 div failed");
184 clk = clk_register_div_pix_clk(ipu->dev, pixel_clk_1_div,
185 pixel_clk_1_sel, CLK_SET_RATE_PARENT, pdata->id, 1, 0);
187 dev_err(ipu->dev, "clk register di1 div failed");
191 ipu->pixel_clk[0] = clk_register_gate_pix_clk(ipu->dev, pixel_clk_0,
192 pixel_clk_0_div, CLK_SET_RATE_PARENT,
194 if (IS_ERR(ipu->pixel_clk[0])) {
195 dev_err(ipu->dev, "clk register di0 gate failed");
196 return PTR_ERR(ipu->pixel_clk[0]);
198 ipu->pixel_clk[1] = clk_register_gate_pix_clk(ipu->dev, pixel_clk_1,
199 pixel_clk_1_div, CLK_SET_RATE_PARENT,
201 if (IS_ERR(ipu->pixel_clk[1])) {
202 dev_err(ipu->dev, "clk register di1 gate failed");
203 return PTR_ERR(ipu->pixel_clk[1]);
206 ret = clk_set_parent(ipu->pixel_clk_sel[0], ipu->ipu_clk);
208 dev_err(ipu->dev, "clk set parent failed");
212 ret = clk_set_parent(ipu->pixel_clk_sel[1], ipu->ipu_clk);
214 dev_err(ipu->dev, "clk set parent failed");
218 ipu->di_clk[0] = devm_clk_get(ipu->dev, "di0");
219 if (IS_ERR(ipu->di_clk[0])) {
220 dev_err(ipu->dev, "clk_get di0 failed");
221 return PTR_ERR(ipu->di_clk[0]);
223 ipu->di_clk[1] = devm_clk_get(ipu->dev, "di1");
224 if (IS_ERR(ipu->di_clk[1])) {
225 dev_err(ipu->dev, "clk_get di1 failed");
226 return PTR_ERR(ipu->di_clk[1]);
229 ipu->di_clk_sel[0] = devm_clk_get(ipu->dev, "di0_sel");
230 if (IS_ERR(ipu->di_clk_sel[0])) {
231 dev_err(ipu->dev, "clk_get di0_sel failed");
232 return PTR_ERR(ipu->di_clk_sel[0]);
234 ipu->di_clk_sel[1] = devm_clk_get(ipu->dev, "di1_sel");
235 if (IS_ERR(ipu->di_clk_sel[1])) {
236 dev_err(ipu->dev, "clk_get di1_sel failed");
237 return PTR_ERR(ipu->di_clk_sel[1]);
/*
 * Trigger an IPU internal-memory reset and poll for completion.
 * NOTE(review): the loop body and any timeout/return handling are not
 * visible in this view of the file — confirm upstream.
 */
243 static int ipu_mem_reset(struct ipu_soc *ipu)
247 ipu_cm_write(ipu, 0x807FFFFF, IPU_MEM_RST);
/* Bit 31 reads back as 1 while the reset is still in progress. */
249 while (ipu_cm_read(ipu, IPU_MEM_RST) & 0x80000000) {
258 struct ipu_soc *ipu_get_soc(int id)
260 if (id >= MXC_IPU_MAX_NUM)
261 return ERR_PTR(-ENODEV);
262 else if (!ipu_array[id].online)
263 return ERR_PTR(-ENODEV);
265 return &(ipu_array[id]);
267 EXPORT_SYMBOL_GPL(ipu_get_soc);
/*
 * Enable the IPU bus clock (it was prepared once at probe time, so a
 * bare clk_enable() is sufficient here).
 * NOTE(review): the check on @ret is not visible in this view.
 */
269 void _ipu_get(struct ipu_soc *ipu)
273 ret = clk_enable(ipu->ipu_clk);
/* Counterpart of _ipu_get(): drop one enable count on the IPU bus clock. */
278 void _ipu_put(struct ipu_soc *ipu)
280 clk_disable(ipu->ipu_clk);
/*
 * Disable the IPU HSP clock.
 * NOTE(review): the function body is not visible in this view of the
 * file; presumably it wraps _ipu_put() — confirm upstream.
 */
283 void ipu_disable_hsp_clk(struct ipu_soc *ipu)
287 EXPORT_SYMBOL(ipu_disable_hsp_clk);
/* Platform-device id table; driver_data carries the IPU hardware revision. */
289 static struct platform_device_id imx_ipu_type[] = {
292 .driver_data = IPU_V3H,
297 MODULE_DEVICE_TABLE(platform, imx_ipu_type);
/* Device-tree match table; .data points back into imx_ipu_type[] above. */
299 static const struct of_device_id imx_ipuv3_dt_ids[] = {
300 { .compatible = "fsl,imx6q-ipu", .data = &imx_ipu_type[IMX6Q_IPU], },
303 MODULE_DEVICE_TABLE(of, imx_ipuv3_dt_ids);
306 * This function is called by the driver framework to initialize the IPU
309 * @param dev The device structure for the IPU passed in by the
312 * @return Returns 0 on success or negative error code on error
314 static int ipu_probe(struct platform_device *pdev)
317 struct resource *res;
318 unsigned long ipu_base;
319 const struct of_device_id *of_id =
320 of_match_device(imx_ipuv3_dt_ids, &pdev->dev);
321 struct ipu_pltfm_data *pltfm_data;
325 dev_dbg(&pdev->dev, "<%s>\n", __func__);
327 pltfm_data = devm_kzalloc(&pdev->dev, sizeof(struct ipu_pltfm_data),
332 ret = of_property_read_u32(pdev->dev.of_node,
333 "bypass_reset", &bypass_reset);
335 dev_dbg(&pdev->dev, "can not get bypass_reset\n");
338 pltfm_data->bypass_reset = (bool)bypass_reset;
340 pltfm_data->id = of_alias_get_id(pdev->dev.of_node, "ipu");
341 if (pltfm_data->id < 0) {
342 dev_dbg(&pdev->dev, "can not get alias id\n");
343 return pltfm_data->id;
347 pdev->id_entry = of_id->data;
348 pltfm_data->devtype = pdev->id_entry->driver_data;
349 g_ipu_hw_rev = pltfm_data->devtype;
351 ipu = &ipu_array[pltfm_data->id];
352 memset(ipu, 0, sizeof(struct ipu_soc));
353 ipu->dev = &pdev->dev;
354 ipu->pdata = pltfm_data;
355 dev_dbg(ipu->dev, "IPU rev:%d\n", g_ipu_hw_rev);
356 spin_lock_init(&ipu->int_reg_spin_lock);
357 spin_lock_init(&ipu->rdy_reg_spin_lock);
358 mutex_init(&ipu->mutex_lock);
360 ipu->irq_sync = platform_get_irq(pdev, 0);
361 ipu->irq_err = platform_get_irq(pdev, 1);
362 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
364 if (!res || ipu->irq_sync < 0 || ipu->irq_err < 0) {
365 dev_err(&pdev->dev, "can't get device resources\n");
369 if (!devm_request_mem_region(&pdev->dev, res->start,
370 resource_size(res), pdev->name))
373 ret = devm_request_irq(&pdev->dev, ipu->irq_sync,
374 ipu_sync_irq_handler, 0, pdev->name, ipu);
376 dev_err(ipu->dev, "request SYNC interrupt failed\n");
379 ret = devm_request_irq(&pdev->dev, ipu->irq_err,
380 ipu_err_irq_handler, 0, pdev->name, ipu);
382 dev_err(ipu->dev, "request ERR interrupt failed\n");
386 ipu_base = res->start;
388 if (g_ipu_hw_rev == IPU_V3H) /* IPUv3H */
389 ipu_base += IPUV3H_REG_BASE;
390 else if (g_ipu_hw_rev == IPU_V3M) /* IPUv3M */
391 ipu_base += IPUV3M_REG_BASE;
392 else /* IPUv3D, v3E, v3EX */
393 ipu_base += IPUV3DEX_REG_BASE;
395 ipu->cm_reg = devm_ioremap(&pdev->dev,
396 ipu_base + IPU_CM_REG_BASE, PAGE_SIZE);
397 ipu->ic_reg = devm_ioremap(&pdev->dev,
398 ipu_base + IPU_IC_REG_BASE, PAGE_SIZE);
399 ipu->idmac_reg = devm_ioremap(&pdev->dev,
400 ipu_base + IPU_IDMAC_REG_BASE, PAGE_SIZE);
401 /* DP Registers are accessed thru the SRM */
402 ipu->dp_reg = devm_ioremap(&pdev->dev,
403 ipu_base + IPU_SRM_REG_BASE, PAGE_SIZE);
404 ipu->dc_reg = devm_ioremap(&pdev->dev,
405 ipu_base + IPU_DC_REG_BASE, PAGE_SIZE);
406 ipu->dmfc_reg = devm_ioremap(&pdev->dev,
407 ipu_base + IPU_DMFC_REG_BASE, PAGE_SIZE);
408 ipu->di_reg[0] = devm_ioremap(&pdev->dev,
409 ipu_base + IPU_DI0_REG_BASE, PAGE_SIZE);
410 ipu->di_reg[1] = devm_ioremap(&pdev->dev,
411 ipu_base + IPU_DI1_REG_BASE, PAGE_SIZE);
412 ipu->smfc_reg = devm_ioremap(&pdev->dev,
413 ipu_base + IPU_SMFC_REG_BASE, PAGE_SIZE);
414 ipu->csi_reg[0] = devm_ioremap(&pdev->dev,
415 ipu_base + IPU_CSI0_REG_BASE, PAGE_SIZE);
416 ipu->csi_reg[1] = devm_ioremap(&pdev->dev,
417 ipu_base + IPU_CSI1_REG_BASE, PAGE_SIZE);
418 ipu->cpmem_base = devm_ioremap(&pdev->dev,
419 ipu_base + IPU_CPMEM_REG_BASE, SZ_128K);
420 ipu->tpmem_base = devm_ioremap(&pdev->dev,
421 ipu_base + IPU_TPM_REG_BASE, SZ_64K);
422 ipu->dc_tmpl_reg = devm_ioremap(&pdev->dev,
423 ipu_base + IPU_DC_TMPL_REG_BASE, SZ_128K);
424 ipu->vdi_reg = devm_ioremap(&pdev->dev,
425 ipu_base + IPU_VDI_REG_BASE, PAGE_SIZE);
426 ipu->disp_base[1] = devm_ioremap(&pdev->dev,
427 ipu_base + IPU_DISP1_BASE, SZ_4K);
428 if (!ipu->cm_reg || !ipu->ic_reg || !ipu->idmac_reg ||
429 !ipu->dp_reg || !ipu->dc_reg || !ipu->dmfc_reg ||
430 !ipu->di_reg[0] || !ipu->di_reg[1] || !ipu->smfc_reg ||
431 !ipu->csi_reg[0] || !ipu->csi_reg[1] || !ipu->cpmem_base ||
432 !ipu->tpmem_base || !ipu->dc_tmpl_reg || !ipu->disp_base[1]
436 dev_dbg(ipu->dev, "IPU CM Regs = %p\n", ipu->cm_reg);
437 dev_dbg(ipu->dev, "IPU IC Regs = %p\n", ipu->ic_reg);
438 dev_dbg(ipu->dev, "IPU IDMAC Regs = %p\n", ipu->idmac_reg);
439 dev_dbg(ipu->dev, "IPU DP Regs = %p\n", ipu->dp_reg);
440 dev_dbg(ipu->dev, "IPU DC Regs = %p\n", ipu->dc_reg);
441 dev_dbg(ipu->dev, "IPU DMFC Regs = %p\n", ipu->dmfc_reg);
442 dev_dbg(ipu->dev, "IPU DI0 Regs = %p\n", ipu->di_reg[0]);
443 dev_dbg(ipu->dev, "IPU DI1 Regs = %p\n", ipu->di_reg[1]);
444 dev_dbg(ipu->dev, "IPU SMFC Regs = %p\n", ipu->smfc_reg);
445 dev_dbg(ipu->dev, "IPU CSI0 Regs = %p\n", ipu->csi_reg[0]);
446 dev_dbg(ipu->dev, "IPU CSI1 Regs = %p\n", ipu->csi_reg[1]);
447 dev_dbg(ipu->dev, "IPU CPMem = %p\n", ipu->cpmem_base);
448 dev_dbg(ipu->dev, "IPU TPMem = %p\n", ipu->tpmem_base);
449 dev_dbg(ipu->dev, "IPU DC Template Mem = %p\n", ipu->dc_tmpl_reg);
450 dev_dbg(ipu->dev, "IPU Display Region 1 Mem = %p\n", ipu->disp_base[1]);
451 dev_dbg(ipu->dev, "IPU VDI Regs = %p\n", ipu->vdi_reg);
453 ipu->ipu_clk = devm_clk_get(ipu->dev, "bus");
454 if (IS_ERR(ipu->ipu_clk)) {
455 dev_err(ipu->dev, "clk_get ipu failed");
456 return PTR_ERR(ipu->ipu_clk);
459 /* ipu_clk is always prepared */
460 ret = clk_prepare_enable(ipu->ipu_clk);
462 dev_err(ipu->dev, "ipu clk enable failed\n");
467 ret = ipu_clk_setup_enable(ipu, pltfm_data);
469 dev_err(ipu->dev, "ipu clk setup failed\n");
474 platform_set_drvdata(pdev, ipu);
476 if (!pltfm_data->bypass_reset) {
477 ret = device_reset(&pdev->dev);
479 dev_err(&pdev->dev, "failed to reset: %d\n", ret);
487 /* Set MCU_T to divide MCU access window into 2 */
488 ipu_cm_write(ipu, 0x00400000L | (IPU_MCU_T_DEFAULT << 18),
492 /* Set sync refresh channels and CSI->mem channel as high priority */
493 ipu_idmac_write(ipu, 0x18800001L, IDMAC_CHA_PRI(0));
495 /* Enable error interrupts by default */
496 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(5));
497 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(6));
498 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(9));
499 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(10));
501 if (!pltfm_data->bypass_reset)
502 clk_disable(ipu->ipu_clk);
504 register_ipu_device(ipu, ipu->pdata->id);
506 pm_runtime_enable(&pdev->dev);
/*
 * Driver remove: unregister the child IPU device registered in probe
 * and release the bus-clock reference.
 */
511 int ipu_remove(struct platform_device *pdev)
513 struct ipu_soc *ipu = platform_get_drvdata(pdev);
515 unregister_ipu_device(ipu, ipu->pdata->id);
517 clk_put(ipu->ipu_clk);
/*
 * Dump the principal IPU configuration and routing registers at
 * dev_dbg level. A debugging aid only — reads the hardware but has no
 * side effects; output appears when dynamic debug is enabled.
 */
522 void ipu_dump_registers(struct ipu_soc *ipu)
524 dev_dbg(ipu->dev, "IPU_CONF = \t0x%08X\n", ipu_cm_read(ipu, IPU_CONF));
525 dev_dbg(ipu->dev, "IDMAC_CONF = \t0x%08X\n", ipu_idmac_read(ipu, IDMAC_CONF));
526 dev_dbg(ipu->dev, "IDMAC_CHA_EN1 = \t0x%08X\n",
527 ipu_idmac_read(ipu, IDMAC_CHA_EN(0)));
528 dev_dbg(ipu->dev, "IDMAC_CHA_EN2 = \t0x%08X\n",
529 ipu_idmac_read(ipu, IDMAC_CHA_EN(32)));
530 dev_dbg(ipu->dev, "IDMAC_CHA_PRI1 = \t0x%08X\n",
531 ipu_idmac_read(ipu, IDMAC_CHA_PRI(0)));
532 dev_dbg(ipu->dev, "IDMAC_CHA_PRI2 = \t0x%08X\n",
533 ipu_idmac_read(ipu, IDMAC_CHA_PRI(32)));
534 dev_dbg(ipu->dev, "IDMAC_BAND_EN1 = \t0x%08X\n",
535 ipu_idmac_read(ipu, IDMAC_BAND_EN(0)));
536 dev_dbg(ipu->dev, "IDMAC_BAND_EN2 = \t0x%08X\n",
537 ipu_idmac_read(ipu, IDMAC_BAND_EN(32)));
538 dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL0 = \t0x%08X\n",
539 ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(0)));
540 dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL1 = \t0x%08X\n",
541 ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(32)));
/* Triple-buffer (TRB) registers exist only on IPUv3DEX and newer. */
542 if (g_ipu_hw_rev >= IPU_V3DEX) {
543 dev_dbg(ipu->dev, "IPU_CHA_TRB_MODE_SEL0 = \t0x%08X\n",
544 ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(0)));
545 dev_dbg(ipu->dev, "IPU_CHA_TRB_MODE_SEL1 = \t0x%08X\n",
546 ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(32)));
548 dev_dbg(ipu->dev, "DMFC_WR_CHAN = \t0x%08X\n",
549 ipu_dmfc_read(ipu, DMFC_WR_CHAN));
550 dev_dbg(ipu->dev, "DMFC_WR_CHAN_DEF = \t0x%08X\n",
551 ipu_dmfc_read(ipu, DMFC_WR_CHAN_DEF));
552 dev_dbg(ipu->dev, "DMFC_DP_CHAN = \t0x%08X\n",
553 ipu_dmfc_read(ipu, DMFC_DP_CHAN));
554 dev_dbg(ipu->dev, "DMFC_DP_CHAN_DEF = \t0x%08X\n",
555 ipu_dmfc_read(ipu, DMFC_DP_CHAN_DEF));
556 dev_dbg(ipu->dev, "DMFC_IC_CTRL = \t0x%08X\n",
557 ipu_dmfc_read(ipu, DMFC_IC_CTRL));
558 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW1 = \t0x%08X\n",
559 ipu_cm_read(ipu, IPU_FS_PROC_FLOW1));
560 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW2 = \t0x%08X\n",
561 ipu_cm_read(ipu, IPU_FS_PROC_FLOW2));
562 dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW3 = \t0x%08X\n",
563 ipu_cm_read(ipu, IPU_FS_PROC_FLOW3));
564 dev_dbg(ipu->dev, "IPU_FS_DISP_FLOW1 = \t0x%08X\n",
565 ipu_cm_read(ipu, IPU_FS_DISP_FLOW1));
566 dev_dbg(ipu->dev, "IPU_VDIC_VDI_FSIZE = \t0x%08X\n",
567 ipu_vdi_read(ipu, VDI_FSIZE));
568 dev_dbg(ipu->dev, "IPU_VDIC_VDI_C = \t0x%08X\n",
569 ipu_vdi_read(ipu, VDI_C));
570 dev_dbg(ipu->dev, "IPU_IC_CONF = \t0x%08X\n",
571 ipu_ic_read(ipu, IC_CONF));
575 * This function is called to initialize a logical IPU channel.
577 * @param ipu ipu handler
578 * @param channel Input parameter for the logical channel ID to init.
580 * @param params Input parameter containing union of channel
581 * initialization parameters.
583 * @return Returns 0 on success or negative error code on fail
585 int32_t ipu_init_channel(struct ipu_soc *ipu, ipu_channel_t channel, ipu_channel_params_t *params)
591 dev_dbg(ipu->dev, "init channel = %d\n", IPU_CHAN_ID(channel));
593 ret = pm_runtime_get_sync(ipu->dev);
595 dev_err(ipu->dev, "ch = %d, pm_runtime_get failed:%d!\n",
596 IPU_CHAN_ID(channel), ret);
601 * Here, ret could be 1 if the device's runtime PM status was
602 * already 'active', so clear it to be 0.
608 mutex_lock(&ipu->mutex_lock);
610 /* Re-enable error interrupts every time a channel is initialized */
611 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(5));
612 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(6));
613 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(9));
614 ipu_cm_write(ipu, 0xFFFFFFFF, IPU_INT_CTRL(10));
616 if (ipu->channel_init_mask & (1L << IPU_CHAN_ID(channel))) {
617 dev_warn(ipu->dev, "Warning: channel already initialized %d\n",
618 IPU_CHAN_ID(channel));
621 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
628 if (params->csi_mem.csi > 1) {
633 if (params->csi_mem.interlaced)
634 ipu->chan_is_interlaced[channel_2_dma(channel,
635 IPU_OUTPUT_BUFFER)] = true;
637 ipu->chan_is_interlaced[channel_2_dma(channel,
638 IPU_OUTPUT_BUFFER)] = false;
640 ipu->smfc_use_count++;
641 ipu->csi_channel[params->csi_mem.csi] = channel;
644 if (params->csi_mem.mipi_en) {
645 ipu_conf |= (1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
646 params->csi_mem.csi));
647 _ipu_smfc_init(ipu, channel, params->csi_mem.mipi_vc,
648 params->csi_mem.csi);
649 _ipu_csi_set_mipi_di(ipu, params->csi_mem.mipi_vc,
650 params->csi_mem.mipi_id, params->csi_mem.csi);
652 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
653 params->csi_mem.csi));
654 _ipu_smfc_init(ipu, channel, 0, params->csi_mem.csi);
657 /*CSI data (include compander) dest*/
658 _ipu_csi_init(ipu, channel, params->csi_mem.csi);
660 case CSI_PRP_ENC_MEM:
661 if (params->csi_prp_enc_mem.csi > 1) {
665 if ((ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) ||
666 (ipu->using_ic_dirct_ch == MEM_VDI_MEM)) {
670 ipu->using_ic_dirct_ch = CSI_PRP_ENC_MEM;
673 ipu->csi_channel[params->csi_prp_enc_mem.csi] = channel;
675 if (params->csi_prp_enc_mem.mipi_en) {
676 ipu_conf |= (1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
677 params->csi_prp_enc_mem.csi));
678 _ipu_csi_set_mipi_di(ipu,
679 params->csi_prp_enc_mem.mipi_vc,
680 params->csi_prp_enc_mem.mipi_id,
681 params->csi_prp_enc_mem.csi);
683 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
684 params->csi_prp_enc_mem.csi));
686 /*CSI0/1 feed into IC*/
687 ipu_conf &= ~IPU_CONF_IC_INPUT;
688 if (params->csi_prp_enc_mem.csi)
689 ipu_conf |= IPU_CONF_CSI_SEL;
691 ipu_conf &= ~IPU_CONF_CSI_SEL;
693 /*PRP skip buffer in memory, only valid when RWS_EN is true*/
694 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
695 ipu_cm_write(ipu, reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
697 /*CSI data (include compander) dest*/
698 _ipu_csi_init(ipu, channel, params->csi_prp_enc_mem.csi);
699 _ipu_ic_init_prpenc(ipu, params, true);
702 if (params->csi_prp_vf_mem.csi > 1) {
706 if ((ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) ||
707 (ipu->using_ic_dirct_ch == MEM_VDI_MEM)) {
711 ipu->using_ic_dirct_ch = CSI_PRP_VF_MEM;
714 ipu->csi_channel[params->csi_prp_vf_mem.csi] = channel;
716 if (params->csi_prp_vf_mem.mipi_en) {
717 ipu_conf |= (1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
718 params->csi_prp_vf_mem.csi));
719 _ipu_csi_set_mipi_di(ipu,
720 params->csi_prp_vf_mem.mipi_vc,
721 params->csi_prp_vf_mem.mipi_id,
722 params->csi_prp_vf_mem.csi);
724 ipu_conf &= ~(1 << (IPU_CONF_CSI0_DATA_SOURCE_OFFSET +
725 params->csi_prp_vf_mem.csi));
727 /*CSI0/1 feed into IC*/
728 ipu_conf &= ~IPU_CONF_IC_INPUT;
729 if (params->csi_prp_vf_mem.csi)
730 ipu_conf |= IPU_CONF_CSI_SEL;
732 ipu_conf &= ~IPU_CONF_CSI_SEL;
734 /*PRP skip buffer in memory, only valid when RWS_EN is true*/
735 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
736 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
738 /*CSI data (include compander) dest*/
739 _ipu_csi_init(ipu, channel, params->csi_prp_vf_mem.csi);
740 _ipu_ic_init_prpvf(ipu, params, true);
744 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
745 ipu_cm_write(ipu, reg | FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
747 if (params->mem_prp_vf_mem.graphics_combine_en)
748 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
749 if (params->mem_prp_vf_mem.alpha_chan_en)
750 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
752 _ipu_ic_init_prpvf(ipu, params, false);
754 case MEM_VDI_PRP_VF_MEM:
755 if ((ipu->using_ic_dirct_ch == CSI_PRP_VF_MEM) ||
756 (ipu->using_ic_dirct_ch == MEM_VDI_MEM) ||
757 (ipu->using_ic_dirct_ch == CSI_PRP_ENC_MEM)) {
761 ipu->using_ic_dirct_ch = MEM_VDI_PRP_VF_MEM;
763 ipu->vdi_use_count++;
764 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
765 reg &= ~FS_VDI_SRC_SEL_MASK;
766 ipu_cm_write(ipu, reg , IPU_FS_PROC_FLOW1);
768 if (params->mem_prp_vf_mem.graphics_combine_en)
769 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
770 _ipu_ic_init_prpvf(ipu, params, false);
771 _ipu_vdi_init(ipu, channel, params);
773 case MEM_VDI_PRP_VF_MEM_P:
774 case MEM_VDI_PRP_VF_MEM_N:
777 _ipu_vdi_init(ipu, channel, params);
780 if ((ipu->using_ic_dirct_ch == CSI_PRP_VF_MEM) ||
781 (ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM) ||
782 (ipu->using_ic_dirct_ch == CSI_PRP_ENC_MEM)) {
786 ipu->using_ic_dirct_ch = MEM_VDI_MEM;
788 ipu->vdi_use_count++;
789 _ipu_ic_init_prpvf(ipu, params, false);
790 _ipu_vdi_init(ipu, channel, params);
794 ipu->rot_use_count++;
795 _ipu_ic_init_rotate_vf(ipu, params);
797 case MEM_PRP_ENC_MEM:
799 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
800 ipu_cm_write(ipu, reg | FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
801 _ipu_ic_init_prpenc(ipu, params, false);
803 case MEM_ROT_ENC_MEM:
805 ipu->rot_use_count++;
806 _ipu_ic_init_rotate_enc(ipu, params);
809 if (params->mem_pp_mem.graphics_combine_en)
810 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = true;
811 if (params->mem_pp_mem.alpha_chan_en)
812 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
813 _ipu_ic_init_pp(ipu, params);
817 _ipu_ic_init_rotate_pp(ipu, params);
819 ipu->rot_use_count++;
822 if (params->mem_dc_sync.di > 1) {
827 ipu->dc_di_assignment[1] = params->mem_dc_sync.di;
828 _ipu_dc_init(ipu, 1, params->mem_dc_sync.di,
829 params->mem_dc_sync.interlaced,
830 params->mem_dc_sync.out_pixel_fmt);
831 ipu->di_use_count[params->mem_dc_sync.di]++;
833 ipu->dmfc_use_count++;
836 if (params->mem_dp_bg_sync.di > 1) {
841 if (params->mem_dp_bg_sync.alpha_chan_en)
842 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
844 ipu->dc_di_assignment[5] = params->mem_dp_bg_sync.di;
845 _ipu_dp_init(ipu, channel, params->mem_dp_bg_sync.in_pixel_fmt,
846 params->mem_dp_bg_sync.out_pixel_fmt);
847 _ipu_dc_init(ipu, 5, params->mem_dp_bg_sync.di,
848 params->mem_dp_bg_sync.interlaced,
849 params->mem_dp_bg_sync.out_pixel_fmt);
850 ipu->di_use_count[params->mem_dp_bg_sync.di]++;
853 ipu->dmfc_use_count++;
856 _ipu_dp_init(ipu, channel, params->mem_dp_fg_sync.in_pixel_fmt,
857 params->mem_dp_fg_sync.out_pixel_fmt);
859 if (params->mem_dp_fg_sync.alpha_chan_en)
860 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = true;
864 ipu->dmfc_use_count++;
867 if (params->direct_async.di > 1) {
872 ipu->dc_di_assignment[8] = params->direct_async.di;
873 _ipu_dc_init(ipu, 8, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
874 ipu->di_use_count[params->direct_async.di]++;
878 if (params->direct_async.di > 1) {
883 ipu->dc_di_assignment[9] = params->direct_async.di;
884 _ipu_dc_init(ipu, 9, params->direct_async.di, false, IPU_PIX_FMT_GENERIC);
885 ipu->di_use_count[params->direct_async.di]++;
889 dev_err(ipu->dev, "Missing channel initialization\n");
893 ipu->channel_init_mask |= 1L << IPU_CHAN_ID(channel);
895 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
898 mutex_unlock(&ipu->mutex_lock);
901 EXPORT_SYMBOL(ipu_init_channel);
904 * This function is called to uninitialize a logical IPU channel.
906 * @param ipu ipu handler
907 * @param channel Input parameter for the logical channel ID to uninit.
909 void ipu_uninit_channel(struct ipu_soc *ipu, ipu_channel_t channel)
912 uint32_t in_dma, out_dma = 0;
914 uint32_t dc_chan = 0;
917 mutex_lock(&ipu->mutex_lock);
919 if ((ipu->channel_init_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
920 dev_dbg(ipu->dev, "Channel already uninitialized %d\n",
921 IPU_CHAN_ID(channel));
922 mutex_unlock(&ipu->mutex_lock);
926 /* Make sure channel is disabled */
927 /* Get input and output dma channels */
928 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
929 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
931 if (idma_is_set(ipu, IDMAC_CHA_EN, in_dma) ||
932 idma_is_set(ipu, IDMAC_CHA_EN, out_dma)) {
934 "Channel %d is not disabled, disable first\n",
935 IPU_CHAN_ID(channel));
936 mutex_unlock(&ipu->mutex_lock);
940 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
942 /* Reset the double buffer */
943 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(in_dma));
944 ipu_cm_write(ipu, reg & ~idma_mask(in_dma), IPU_CHA_DB_MODE_SEL(in_dma));
945 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(out_dma));
946 ipu_cm_write(ipu, reg & ~idma_mask(out_dma), IPU_CHA_DB_MODE_SEL(out_dma));
948 /* Reset the triple buffer */
949 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(in_dma));
950 ipu_cm_write(ipu, reg & ~idma_mask(in_dma), IPU_CHA_TRB_MODE_SEL(in_dma));
951 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(out_dma));
952 ipu_cm_write(ipu, reg & ~idma_mask(out_dma), IPU_CHA_TRB_MODE_SEL(out_dma));
954 if (_ipu_is_ic_chan(in_dma) || _ipu_is_dp_graphic_chan(in_dma)) {
955 ipu->sec_chan_en[IPU_CHAN_ID(channel)] = false;
956 ipu->thrd_chan_en[IPU_CHAN_ID(channel)] = false;
964 ipu->smfc_use_count--;
965 if (ipu->csi_channel[0] == channel) {
966 ipu->csi_channel[0] = CHAN_NONE;
967 } else if (ipu->csi_channel[1] == channel) {
968 ipu->csi_channel[1] = CHAN_NONE;
971 case CSI_PRP_ENC_MEM:
973 if (ipu->using_ic_dirct_ch == CSI_PRP_ENC_MEM)
974 ipu->using_ic_dirct_ch = 0;
975 _ipu_ic_uninit_prpenc(ipu);
976 if (ipu->csi_channel[0] == channel) {
977 ipu->csi_channel[0] = CHAN_NONE;
978 } else if (ipu->csi_channel[1] == channel) {
979 ipu->csi_channel[1] = CHAN_NONE;
984 if (ipu->using_ic_dirct_ch == CSI_PRP_VF_MEM)
985 ipu->using_ic_dirct_ch = 0;
986 _ipu_ic_uninit_prpvf(ipu);
987 if (ipu->csi_channel[0] == channel) {
988 ipu->csi_channel[0] = CHAN_NONE;
989 } else if (ipu->csi_channel[1] == channel) {
990 ipu->csi_channel[1] = CHAN_NONE;
995 _ipu_ic_uninit_prpvf(ipu);
996 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
997 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
999 case MEM_VDI_PRP_VF_MEM:
1000 ipu->ic_use_count--;
1001 ipu->vdi_use_count--;
1002 if (ipu->using_ic_dirct_ch == MEM_VDI_PRP_VF_MEM)
1003 ipu->using_ic_dirct_ch = 0;
1004 _ipu_ic_uninit_prpvf(ipu);
1005 _ipu_vdi_uninit(ipu);
1006 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1007 ipu_cm_write(ipu, reg & ~FS_VF_IN_VALID, IPU_FS_PROC_FLOW1);
1010 ipu->ic_use_count--;
1011 ipu->vdi_use_count--;
1012 if (ipu->using_ic_dirct_ch == MEM_VDI_MEM)
1013 ipu->using_ic_dirct_ch = 0;
1014 _ipu_ic_uninit_prpvf(ipu);
1015 _ipu_vdi_uninit(ipu);
1017 case MEM_VDI_PRP_VF_MEM_P:
1018 case MEM_VDI_PRP_VF_MEM_N:
1022 case MEM_ROT_VF_MEM:
1023 ipu->rot_use_count--;
1024 ipu->ic_use_count--;
1025 _ipu_ic_uninit_rotate_vf(ipu);
1027 case MEM_PRP_ENC_MEM:
1028 ipu->ic_use_count--;
1029 _ipu_ic_uninit_prpenc(ipu);
1030 reg = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1031 ipu_cm_write(ipu, reg & ~FS_ENC_IN_VALID, IPU_FS_PROC_FLOW1);
1033 case MEM_ROT_ENC_MEM:
1034 ipu->rot_use_count--;
1035 ipu->ic_use_count--;
1036 _ipu_ic_uninit_rotate_enc(ipu);
1039 ipu->ic_use_count--;
1040 _ipu_ic_uninit_pp(ipu);
1042 case MEM_ROT_PP_MEM:
1043 ipu->rot_use_count--;
1044 ipu->ic_use_count--;
1045 _ipu_ic_uninit_rotate_pp(ipu);
1049 _ipu_dc_uninit(ipu, 1);
1050 ipu->di_use_count[ipu->dc_di_assignment[1]]--;
1051 ipu->dc_use_count--;
1052 ipu->dmfc_use_count--;
1056 _ipu_dp_uninit(ipu, channel);
1057 _ipu_dc_uninit(ipu, 5);
1058 ipu->di_use_count[ipu->dc_di_assignment[5]]--;
1059 ipu->dc_use_count--;
1060 ipu->dp_use_count--;
1061 ipu->dmfc_use_count--;
1064 _ipu_dp_uninit(ipu, channel);
1065 ipu->dc_use_count--;
1066 ipu->dp_use_count--;
1067 ipu->dmfc_use_count--;
1071 _ipu_dc_uninit(ipu, 8);
1072 ipu->di_use_count[ipu->dc_di_assignment[8]]--;
1073 ipu->dc_use_count--;
1077 _ipu_dc_uninit(ipu, 9);
1078 ipu->di_use_count[ipu->dc_di_assignment[9]]--;
1079 ipu->dc_use_count--;
1085 if (ipu->ic_use_count == 0)
1086 ipu_conf &= ~IPU_CONF_IC_EN;
1087 if (ipu->vdi_use_count == 0) {
1088 ipu_conf &= ~IPU_CONF_ISP_EN;
1089 ipu_conf &= ~IPU_CONF_VDI_EN;
1090 ipu_conf &= ~IPU_CONF_IC_INPUT;
1092 if (ipu->rot_use_count == 0)
1093 ipu_conf &= ~IPU_CONF_ROT_EN;
1094 if (ipu->dc_use_count == 0)
1095 ipu_conf &= ~IPU_CONF_DC_EN;
1096 if (ipu->dp_use_count == 0)
1097 ipu_conf &= ~IPU_CONF_DP_EN;
1098 if (ipu->dmfc_use_count == 0)
1099 ipu_conf &= ~IPU_CONF_DMFC_EN;
1100 if (ipu->di_use_count[0] == 0) {
1101 ipu_conf &= ~IPU_CONF_DI0_EN;
1103 if (ipu->di_use_count[1] == 0) {
1104 ipu_conf &= ~IPU_CONF_DI1_EN;
1106 if (ipu->smfc_use_count == 0)
1107 ipu_conf &= ~IPU_CONF_SMFC_EN;
1109 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
1111 ipu->channel_init_mask &= ~(1L << IPU_CHAN_ID(channel));
1114 * Disable pixel clk and its parent clock(if the parent clock
1115 * usecount is 1) after clearing DC/DP/DI bits in IPU_CONF
1116 * register to prevent LVDS display channel starvation.
1118 if (_ipu_is_primary_disp_chan(in_dma))
1119 clk_disable_unprepare(ipu->pixel_clk[ipu->dc_di_assignment[dc_chan]]);
1121 mutex_unlock(&ipu->mutex_lock);
1125 ret = pm_runtime_put_sync_suspend(ipu->dev);
1127 dev_err(ipu->dev, "ch = %d, pm_runtime_put failed:%d!\n",
1128 IPU_CHAN_ID(channel), ret);
1132 WARN_ON(ipu->ic_use_count < 0);
1133 WARN_ON(ipu->vdi_use_count < 0);
1134 WARN_ON(ipu->rot_use_count < 0);
1135 WARN_ON(ipu->dc_use_count < 0);
1136 WARN_ON(ipu->dp_use_count < 0);
1137 WARN_ON(ipu->dmfc_use_count < 0);
1138 WARN_ON(ipu->smfc_use_count < 0);
1140 EXPORT_SYMBOL(ipu_uninit_channel);
1143 * This function is called to initialize buffer(s) for logical IPU channel.
1145 * @param ipu ipu handler
1147 * @param channel Input parameter for the logical channel ID.
1149 * @param type Input parameter which buffer to initialize.
1151 * @param pixel_fmt Input parameter for pixel format of buffer.
1152 * Pixel format is a FOURCC ASCII code.
1154 * @param width Input parameter for width of buffer in pixels.
1156 * @param height Input parameter for height of buffer in pixels.
1158 * @param stride Input parameter for stride length of buffer
1161 * @param rot_mode Input parameter for rotation setting of buffer.
1162 * A rotation setting other than
1163 * IPU_ROTATE_VERT_FLIP
1164 * should only be used for input buffers of
1165 * rotation channels.
1167 * @param phyaddr_0 Input parameter buffer 0 physical address.
1169 * @param phyaddr_1 Input parameter buffer 1 physical address.
1170 * Setting this to a value other than NULL enables
1171 * double buffering mode.
1173 * @param phyaddr_2 Input parameter buffer 2 physical address.
1174 * Setting this to a value other than NULL enables
1175 * triple buffering mode, phyaddr_1 should not be
1178 * @param u private u offset for additional cropping,
1181 * @param v private v offset for additional cropping,
1184 * @return Returns 0 on success or negative error code on fail
/*
 * Initialize the CPMEM parameters and buffer addresses for one IDMAC
 * channel of a logical IPU channel: validates width/height/stride,
 * programs rotation, burst size, alpha sub-channel linkage, and selects
 * double- vs triple-buffer mode before arming "current buffer" state.
 *
 * NOTE(review): the embedded original line numbers are non-contiguous
 * (e.g. 1186->1189, 1307->1343) — this extract is missing statements,
 * braces and switch labels. Do not treat this block as complete source.
 */
1186 int32_t ipu_init_channel_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1189 uint16_t width, uint16_t height,
1191 ipu_rotate_mode_t rot_mode,
1192 dma_addr_t phyaddr_0, dma_addr_t phyaddr_1,
1193 dma_addr_t phyaddr_2,
1194 uint32_t u, uint32_t v)
1198 uint32_t burst_size;
1200 dma_chan = channel_2_dma(channel, type);
1201 if (!idma_is_valid(dma_chan))
/* Clamp stride up to at least one full line of pixels. */
1204 if (stride < width * bytes_per_pixel(pixel_fmt))
1205 stride = width * bytes_per_pixel(pixel_fmt);
1209 "Stride not 32-bit aligned, stride = %d\n", stride);
1212 /* IC & IRT channels' width must be multiple of 8 pixels */
1213 if ((_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan))
1215 dev_err(ipu->dev, "Width must be 8 pixel multiple\n");
/* VDI output: minimum 16x16, even width, height multiple of 4. */
1219 if (_ipu_is_vdi_out_chan(dma_chan) &&
1220 ((width < 16) || (height < 16) || (width % 2) || (height % 4))) {
1221 dev_err(ipu->dev, "vdi width/height limited err\n");
1225 /* IPUv3EX and IPUv3M support triple buffer */
1226 if ((!_ipu_is_trb_chan(dma_chan)) && phyaddr_2) {
1227 dev_err(ipu->dev, "Chan%d doesn't support triple buffer "
1228 "mode\n", dma_chan);
/* Triple buffering (buf2 set) requires buf1 to be populated too. */
1231 if (!phyaddr_1 && phyaddr_2) {
1232 dev_err(ipu->dev, "Chan%d's buf1 physical addr is NULL for "
1233 "triple buffer mode\n", dma_chan);
1237 mutex_lock(&ipu->mutex_lock);
1239 /* Build parameter memory data for DMA channel */
1240 _ipu_ch_param_init(ipu, dma_chan, pixel_fmt, width, height, stride, u, v, 0,
1241 phyaddr_0, phyaddr_1, phyaddr_2);
1243 /* Set correlative channel parameter of local alpha channel */
1244 if ((_ipu_is_ic_graphic_chan(dma_chan) ||
1245 _ipu_is_dp_graphic_chan(dma_chan)) &&
1246 (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] == true)) {
1247 _ipu_ch_param_set_alpha_use_separate_channel(ipu, dma_chan, true);
1248 _ipu_ch_param_set_alpha_buffer_memory(ipu, dma_chan);
1249 _ipu_ch_param_set_alpha_condition_read(ipu, dma_chan);
1250 /* fix alpha width as 8 and burst size as 16*/
1251 _ipu_ch_params_set_alpha_width(ipu, dma_chan, 8);
1252 _ipu_ch_param_set_burst_size(ipu, dma_chan, 16);
1253 } else if (_ipu_is_ic_graphic_chan(dma_chan) &&
1254 ipu_pixel_format_has_alpha(pixel_fmt))
1255 _ipu_ch_param_set_alpha_use_separate_channel(ipu, dma_chan, false);
1258 _ipu_ch_param_set_rotation(ipu, dma_chan, rot_mode);
1260 /* IC and ROT channels have restriction of 8 or 16 pix burst length */
1261 if (_ipu_is_ic_chan(dma_chan) || _ipu_is_vdi_out_chan(dma_chan)) {
1262 if ((width % 16) == 0)
1263 _ipu_ch_param_set_burst_size(ipu, dma_chan, 16);
1265 _ipu_ch_param_set_burst_size(ipu, dma_chan, 8);
1266 } else if (_ipu_is_irt_chan(dma_chan)) {
1267 _ipu_ch_param_set_burst_size(ipu, dma_chan, 8);
1268 _ipu_ch_param_set_block_mode(ipu, dma_chan);
1269 } else if (_ipu_is_dmfc_chan(dma_chan)) {
1270 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1271 _ipu_dmfc_set_wait4eot(ipu, dma_chan, width);
1272 _ipu_dmfc_set_burst_size(ipu, dma_chan, burst_size);
1275 if (_ipu_disp_chan_is_interlaced(ipu, channel) ||
1276 ipu->chan_is_interlaced[dma_chan])
1277 _ipu_ch_param_set_interlaced_scan(ipu, dma_chan);
1279 if (_ipu_is_ic_chan(dma_chan) || _ipu_is_irt_chan(dma_chan) ||
1280 _ipu_is_vdi_out_chan(dma_chan)) {
1281 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1282 _ipu_ic_idma_init(ipu, dma_chan, width, height, burst_size,
1284 } else if (_ipu_is_smfc_chan(dma_chan)) {
1285 burst_size = _ipu_ch_param_get_burst_size(ipu, dma_chan);
1287 * This is different from IPUv3 spec, but it is confirmed
1288 * in IPUforum that SMFC burst size should be NPB[6:3]
1289 * when IDMAC works in 16-bit generic data mode.
1291 if (pixel_fmt == IPU_PIX_FMT_GENERIC)
1292 /* 8 bits per pixel */
1293 burst_size = burst_size >> 4;
1294 else if (pixel_fmt == IPU_PIX_FMT_GENERIC_16)
1295 /* 16 bits per pixel */
1296 burst_size = burst_size >> 3;
1298 burst_size = burst_size >> 2;
1299 _ipu_smfc_set_burst_size(ipu, channel, burst_size-1);
/* High-priority channels get IDMAC lock enable; per-devtype AXI id. */
1303 if (idma_is_set(ipu, IDMAC_CHA_PRI, dma_chan)) {
1304 unsigned reg = IDMAC_CH_LOCK_EN_1;
1306 if (ipu->pdata->devtype == IPU_V3H) {
1307 _ipu_ch_param_set_axi_id(ipu, dma_chan, 0);
/* NOTE(review): the case labels selecting LOCK_EN_2 channels were
 * dropped by the extraction — only the assignments survive. */
1343 reg = IDMAC_CH_LOCK_EN_2;
1347 reg = IDMAC_CH_LOCK_EN_2;
1351 reg = IDMAC_CH_LOCK_EN_2;
1355 reg = IDMAC_CH_LOCK_EN_2;
1359 reg = IDMAC_CH_LOCK_EN_2;
1363 reg = IDMAC_CH_LOCK_EN_2;
1369 value |= ipu_idmac_read(ipu, reg);
1370 ipu_idmac_write(ipu, value, reg);
1372 _ipu_ch_param_set_axi_id(ipu, dma_chan, 1);
1374 if (ipu->pdata->devtype == IPU_V3H)
1375 _ipu_ch_param_set_axi_id(ipu, dma_chan, 1);
1378 _ipu_ch_param_dump(ipu, dma_chan);
/* Triple-buffer mode: clear DB select, set TRB select for channel. */
1380 if (phyaddr_2 && g_ipu_hw_rev >= IPU_V3DEX) {
1381 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(dma_chan));
1382 reg &= ~idma_mask(dma_chan);
1383 ipu_cm_write(ipu, reg, IPU_CHA_DB_MODE_SEL(dma_chan));
1385 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
1386 reg |= idma_mask(dma_chan);
1387 ipu_cm_write(ipu, reg, IPU_CHA_TRB_MODE_SEL(dma_chan));
1389 /* Set IDMAC third buffer's cpmem number */
1390 /* See __ipu_ch_get_third_buf_cpmem_num() for mapping */
1391 ipu_idmac_write(ipu, 0x00444047L, IDMAC_SUB_ADDR_4);
1392 ipu_idmac_write(ipu, 0x46004241L, IDMAC_SUB_ADDR_3);
1393 ipu_idmac_write(ipu, 0x00000045L, IDMAC_SUB_ADDR_1);
1395 /* Reset to buffer 0 */
1396 ipu_cm_write(ipu, tri_cur_buf_mask(dma_chan),
1397 IPU_CHA_TRIPLE_CUR_BUF(dma_chan));
/* Double-buffer (or single-buffer) path: clear TRB, set/clear DB. */
1399 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
1400 reg &= ~idma_mask(dma_chan);
1401 ipu_cm_write(ipu, reg, IPU_CHA_TRB_MODE_SEL(dma_chan));
1403 reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(dma_chan));
1405 reg |= idma_mask(dma_chan);
1407 reg &= ~idma_mask(dma_chan);
1408 ipu_cm_write(ipu, reg, IPU_CHA_DB_MODE_SEL(dma_chan));
1410 /* Reset to buffer 0 */
1411 ipu_cm_write(ipu, idma_mask(dma_chan),
1412 IPU_CHA_CUR_BUF(dma_chan));
1416 mutex_unlock(&ipu->mutex_lock);
1420 EXPORT_SYMBOL(ipu_init_channel_buffer);
1423 * This function is called to update the physical address of a buffer for
1424 * a logical IPU channel.
1426 * @param ipu ipu handler
1427 * @param channel Input parameter for the logical channel ID.
1429 * @param type Input parameter which buffer to initialize.
1431 * @param bufNum Input parameter for buffer number to update.
1432 * 0 or 1 are the only valid values.
1434 * @param phyaddr Input parameter buffer physical address.
1436 * @return This function returns 0 on success or negative error code on
1437 * fail. This function will fail if the buffer is set to ready.
/*
 * Update one buffer's physical address in CPMEM, but only if that buffer
 * is not currently marked ready in the BUFn_RDY register (writing while
 * ready would race the IDMAC). Serialized by rdy_reg_spin_lock.
 * NOTE(review): extraction dropped lines here (e.g. the `if (bufNum == 0)`
 * guard before reading BUF0_RDY, and the error/return paths).
 */
1439 int32_t ipu_update_channel_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1440 ipu_buffer_t type, uint32_t bufNum, dma_addr_t phyaddr)
1444 uint32_t dma_chan = channel_2_dma(channel, type);
1445 unsigned long lock_flags;
1447 if (dma_chan == IDMA_CHAN_INVALID)
1450 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
/* Pick the ready register matching the requested buffer number. */
1452 reg = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan));
1453 else if (bufNum == 1)
1454 reg = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan));
1456 reg = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan));
/* Only safe to retarget the buffer while its ready bit is clear. */
1458 if ((reg & idma_mask(dma_chan)) == 0)
1459 _ipu_ch_param_set_buffer(ipu, dma_chan, bufNum, phyaddr);
1462 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
1466 EXPORT_SYMBOL(ipu_update_channel_buffer);
1469 * This function is called to update the band mode setting for
1470 * a logical IPU channel.
1472 * @param ipu ipu handler
1474 * @param channel Input parameter for the logical channel ID.
1476 * @param type Input parameter which buffer to initialize.
1478 * @param band_height Input parameter for band lines:
1479 * should be log2(4/8/16/32/64/128/256).
1481 * @return This function returns 0 on success or negative error code on
/*
 * Enable band mode for a channel's IDMAC and program the band height
 * (band_height is the log2 exponent: accepted range 2..8, i.e. 4..256
 * lines). Protected by the IPU mutex.
 * NOTE(review): line numbers jump — return statements are missing from
 * this extract.
 */
1484 int32_t ipu_set_channel_bandmode(struct ipu_soc *ipu, ipu_channel_t channel,
1485 ipu_buffer_t type, uint32_t band_height)
1489 uint32_t dma_chan = channel_2_dma(channel, type);
/* Reject exponents outside log2(4)..log2(256). */
1491 if ((2 > band_height) || (8 < band_height))
1494 mutex_lock(&ipu->mutex_lock);
/* Set this channel's bit in the per-bank band-enable register. */
1496 reg = ipu_idmac_read(ipu, IDMAC_BAND_EN(dma_chan));
1497 reg |= 1 << (dma_chan % 32);
1498 ipu_idmac_write(ipu, reg, IDMAC_BAND_EN(dma_chan));
1500 _ipu_ch_param_set_bandmode(ipu, dma_chan, band_height);
1501 dev_dbg(ipu->dev, "dma_chan:%d, band_height:%d.\n\n",
1502 dma_chan, 1 << band_height);
1503 mutex_unlock(&ipu->mutex_lock);
1507 EXPORT_SYMBOL(ipu_set_channel_bandmode);
1510 * This function is called to initialize a buffer for logical IPU channel.
1512 * @param ipu ipu handler
1513 * @param channel Input parameter for the logical channel ID.
1515 * @param type Input parameter which buffer to initialize.
1517 * @param pixel_fmt Input parameter for pixel format of buffer.
1518 * Pixel format is a FOURCC ASCII code.
1520 * @param width Input parameter for width of buffer in pixels.
1522 * @param height Input parameter for height of buffer in pixels.
1524 * @param stride Input parameter for stride length of buffer
1527 * @param u predefined private u offset for additional cropping,
1530 * @param v predefined private v offset for additional cropping,
1533 * @param vertical_offset vertical offset for Y coordinate
1534 * in the existing frame
1537 * @param horizontal_offset horizontal offset for X coordinate
1538 * in the existing frame
1541 * @return Returns 0 on success or negative error code on fail
1542 * This function will fail if any buffer is set to ready.
/*
 * Re-crop a channel in place by updating its CPMEM offsets. Refuses to
 * touch CPMEM while any of the channel's buffers is marked ready
 * (buf2 only counts when the channel is actually in triple-buffer mode).
 * NOTE(review): extract is missing lines (e.g. the error return taken
 * when a buffer is ready).
 */
1545 int32_t ipu_update_channel_offset(struct ipu_soc *ipu,
1546 ipu_channel_t channel, ipu_buffer_t type,
1548 uint16_t width, uint16_t height,
1550 uint32_t u, uint32_t v,
1551 uint32_t vertical_offset, uint32_t horizontal_offset)
1554 uint32_t dma_chan = channel_2_dma(channel, type);
1555 unsigned long lock_flags;
1557 if (dma_chan == IDMA_CHAN_INVALID)
1560 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
/* Bail out if buf0, buf1, or (in TRB mode) buf2 is pending. */
1561 if ((ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1562 (ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan)) & idma_mask(dma_chan)) ||
1563 ((ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan)) & idma_mask(dma_chan)) &&
1564 (ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan)) & idma_mask(dma_chan)) &&
1565 _ipu_is_trb_chan(dma_chan)))
1568 _ipu_ch_offset_update(ipu, dma_chan, pixel_fmt, width, height, stride,
1569 u, v, 0, vertical_offset, horizontal_offset);
1570 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
1574 EXPORT_SYMBOL(ipu_update_channel_offset);
1578 * This function is called to set a channel's buffer as ready.
1580 * @param ipu ipu handler
1581 * @param channel Input parameter for the logical channel ID.
1583 * @param type Input parameter which buffer to initialize.
1585 * @param bufNum Input parameter for which buffer number set to
1588 * @return Returns 0 on success or negative error code on fail
/*
 * Hand buffer bufNum (0, 1 or 2) of a channel to the IDMAC by setting
 * its bit in the matching BUFn_RDY register.
 * NOTE(review): the `if (bufNum == 0)` line and return paths are missing
 * from this extract (line numbers jump 1600->1602).
 */
1590 int32_t ipu_select_buffer(struct ipu_soc *ipu, ipu_channel_t channel,
1591 ipu_buffer_t type, uint32_t bufNum)
1593 uint32_t dma_chan = channel_2_dma(channel, type);
1594 unsigned long lock_flags;
1596 if (dma_chan == IDMA_CHAN_INVALID)
1599 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
1600 /* Mark buffer to be ready. */
1602 ipu_cm_write(ipu, idma_mask(dma_chan),
1603 IPU_CHA_BUF0_RDY(dma_chan));
1604 else if (bufNum == 1)
1605 ipu_cm_write(ipu, idma_mask(dma_chan),
1606 IPU_CHA_BUF1_RDY(dma_chan));
1608 ipu_cm_write(ipu, idma_mask(dma_chan),
1609 IPU_CHA_BUF2_RDY(dma_chan));
1610 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
1614 EXPORT_SYMBOL(ipu_select_buffer);
1617 * This function is called to set the buffers of the multi-VDI input channels as ready.
1619 * @param ipu ipu handler
1620 * @param bufNum Input parameter for which buffer number set to
1623 * @return Returns 0 on success or negative error code on fail
/*
 * Mark the input buffers of all three VDI de-interlace field channels
 * (previous/current/next: MEM_VDI_PRP_VF_MEM_P / _MEM / _MEM_N) ready
 * with a single register write per buffer number.
 * NOTE(review): extract is missing lines (the bufNum dispatch/else and
 * the return), per the jumps in the embedded line numbers.
 */
1625 int32_t ipu_select_multi_vdi_buffer(struct ipu_soc *ipu, uint32_t bufNum)
1628 uint32_t dma_chan = channel_2_dma(MEM_VDI_PRP_VF_MEM, IPU_INPUT_BUFFER);
/* Combined mask covering the P, current, and N field channels. */
1630 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_P, IPU_INPUT_BUFFER))|
1631 idma_mask(dma_chan)|
1632 idma_mask(channel_2_dma(MEM_VDI_PRP_VF_MEM_N, IPU_INPUT_BUFFER));
1633 unsigned long lock_flags;
1635 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
1636 /* Mark buffers to be ready. */
1638 ipu_cm_write(ipu, mask_bit, IPU_CHA_BUF0_RDY(dma_chan));
1640 ipu_cm_write(ipu, mask_bit, IPU_CHA_BUF1_RDY(dma_chan));
1641 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
1645 EXPORT_SYMBOL(ipu_select_multi_vdi_buffer);
/*
 * Frame-sync mux lookup tables, indexed by IPU_CHAN_ID(). The selected
 * value is shifted into the FS_*_SEL fields of the IPU_FS_PROC_FLOWx /
 * IPU_FS_DISP_FLOW1 routing registers by ipu_link_channels() and
 * cleared again by ipu_unlink_channels() below.
 * NOTE(review): `NA` is defined elsewhere in the project — presumably an
 * "unroutable" sentinel; confirm against its definition before reuse.
 */
1648 static int proc_dest_sel[] = {
1649 0, 1, 1, 3, 5, 5, 4, 7, 8, 9, 10, 11, 12, 14, 15, 16,
1650 0, 1, 1, 5, 5, 5, 5, 5, 7, 8, 9, 10, 11, 12, 14, 31 };
/* Source-select codes for the processing flow (IPU_FS_PROC_FLOW1). */
1651 static int proc_src_sel[] = { 0, 6, 7, 6, 7, 8, 5, NA, NA, NA,
1652 NA, NA, NA, NA, NA, 1, 2, 3, 4, 7, 8, NA, 8, NA };
/* Source-select codes for the display flow (IPU_FS_DISP_FLOW1). */
1653 static int disp_src_sel[] = { 0, 6, 7, 8, 3, 4, 5, NA, NA, NA,
1654 NA, NA, NA, NA, NA, 1, NA, 2, NA, 3, 4, 4, 4, 4 };
1658 * This function links 2 channels together for automatic frame
1659 * synchronization. The output of the source channel is linked to the input of
1660 * the destination channel.
1662 * @param ipu ipu handler
1663 * @param src_ch Input parameter for the logical channel ID of
1664 * the source channel.
1666 * @param dest_ch Input parameter for the logical channel ID of
1667 * the destination channel.
1669 * @return This function returns 0 on success or negative error code on
/*
 * Wire src_ch's output to dest_ch's input via the IPU frame-sync unit:
 * read-modify-write the FS_PROC_FLOW1..3 / FS_DISP_FLOW1 routing
 * registers under the IPU mutex. Two switch statements follow — one
 * keyed on src_ch (destination-select fields), one keyed on dest_ch
 * (source-select fields).
 * NOTE(review): most `case`/`switch`/`break` lines were dropped by the
 * extraction (line numbers jump throughout); only the register
 * manipulation survives here.
 */
1672 int32_t ipu_link_channels(struct ipu_soc *ipu, ipu_channel_t src_ch, ipu_channel_t dest_ch)
1675 uint32_t fs_proc_flow1;
1676 uint32_t fs_proc_flow2;
1677 uint32_t fs_proc_flow3;
1678 uint32_t fs_disp_flow1;
1680 mutex_lock(&ipu->mutex_lock);
/* Snapshot all four routing registers before editing fields. */
1682 fs_proc_flow1 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1683 fs_proc_flow2 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW2);
1684 fs_proc_flow3 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW3);
1685 fs_disp_flow1 = ipu_cm_read(ipu, IPU_FS_DISP_FLOW1);
/* --- dest-select fields, keyed on src_ch (case labels missing) --- */
1689 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1691 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1692 FS_SMFC0_DEST_SEL_OFFSET;
1695 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1697 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1698 FS_SMFC1_DEST_SEL_OFFSET;
1701 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1703 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1704 FS_SMFC2_DEST_SEL_OFFSET;
1707 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1709 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1710 FS_SMFC3_DEST_SEL_OFFSET;
1712 case CSI_PRP_ENC_MEM:
1713 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1715 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1716 FS_PRPENC_DEST_SEL_OFFSET;
1718 case CSI_PRP_VF_MEM:
1719 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1721 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1722 FS_PRPVF_DEST_SEL_OFFSET;
1725 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1727 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1728 FS_PP_DEST_SEL_OFFSET;
1730 case MEM_ROT_PP_MEM:
1731 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1733 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1734 FS_PP_ROT_DEST_SEL_OFFSET;
1736 case MEM_PRP_ENC_MEM:
1737 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1739 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1740 FS_PRPENC_DEST_SEL_OFFSET;
1742 case MEM_ROT_ENC_MEM:
1743 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1745 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1746 FS_PRPENC_ROT_DEST_SEL_OFFSET;
1748 case MEM_PRP_VF_MEM:
1749 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1751 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1752 FS_PRPVF_DEST_SEL_OFFSET;
1754 case MEM_VDI_PRP_VF_MEM:
1755 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1757 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1758 FS_PRPVF_DEST_SEL_OFFSET;
1760 case MEM_ROT_VF_MEM:
1761 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
1763 proc_dest_sel[IPU_CHAN_ID(dest_ch)] <<
1764 FS_PRPVF_ROT_DEST_SEL_OFFSET;
/* VDOA can feed either the VDI or the IC (PP) depending on dest_ch. */
1767 fs_proc_flow3 &= ~FS_VDOA_DEST_SEL_MASK;
1768 if (MEM_VDI_MEM == dest_ch)
1769 fs_proc_flow3 |= FS_VDOA_DEST_SEL_VDI;
1770 else if (MEM_PP_MEM == dest_ch)
1771 fs_proc_flow3 |= FS_VDOA_DEST_SEL_IC;
/* --- src-select fields, keyed on dest_ch (case labels missing) --- */
1784 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1785 if (MEM_VDOA_MEM == src_ch)
1786 fs_proc_flow1 |= FS_PP_SRC_SEL_VDOA;
1788 fs_proc_flow1 |= proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1789 FS_PP_SRC_SEL_OFFSET;
1791 case MEM_ROT_PP_MEM:
1792 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1794 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1795 FS_PP_ROT_SRC_SEL_OFFSET;
1797 case MEM_PRP_ENC_MEM:
1798 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1800 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1802 case MEM_ROT_ENC_MEM:
1803 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1805 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1806 FS_PRPENC_ROT_SRC_SEL_OFFSET;
1808 case MEM_PRP_VF_MEM:
1809 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1811 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1813 case MEM_VDI_PRP_VF_MEM:
1814 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1816 proc_src_sel[IPU_CHAN_ID(src_ch)] << FS_PRP_SRC_SEL_OFFSET;
1818 case MEM_ROT_VF_MEM:
1819 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1821 proc_src_sel[IPU_CHAN_ID(src_ch)] <<
1822 FS_PRPVF_ROT_SRC_SEL_OFFSET;
1825 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1827 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC1_SRC_SEL_OFFSET;
1830 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1832 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1833 FS_DP_SYNC0_SRC_SEL_OFFSET;
1836 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1838 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1839 FS_DP_SYNC1_SRC_SEL_OFFSET;
1842 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1844 disp_src_sel[IPU_CHAN_ID(src_ch)] << FS_DC2_SRC_SEL_OFFSET;
1847 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1849 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1850 FS_DP_ASYNC0_SRC_SEL_OFFSET;
1853 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
1855 disp_src_sel[IPU_CHAN_ID(src_ch)] <<
1856 FS_DP_ASYNC1_SRC_SEL_OFFSET;
1859 fs_proc_flow1 &= ~FS_VDI_SRC_SEL_MASK;
1860 if (MEM_VDOA_MEM == src_ch)
1861 fs_proc_flow1 |= FS_VDI_SRC_SEL_VDOA;
/* Commit all four updated routing registers. */
1872 ipu_cm_write(ipu, fs_proc_flow1, IPU_FS_PROC_FLOW1);
1873 ipu_cm_write(ipu, fs_proc_flow2, IPU_FS_PROC_FLOW2);
1874 ipu_cm_write(ipu, fs_proc_flow3, IPU_FS_PROC_FLOW3);
1875 ipu_cm_write(ipu, fs_disp_flow1, IPU_FS_DISP_FLOW1);
1878 mutex_unlock(&ipu->mutex_lock);
1881 EXPORT_SYMBOL(ipu_link_channels);
1884 * This function unlinks 2 channels and disables automatic frame
1887 * @param ipu ipu handler
1888 * @param src_ch Input parameter for the logical channel ID of
1889 * the source channel.
1891 * @param dest_ch Input parameter for the logical channel ID of
1892 * the destination channel.
1894 * @return This function returns 0 on success or negative error code on
/*
 * Inverse of ipu_link_channels(): clear the FS routing fields that tie
 * src_ch's output to dest_ch's input, then write back all four routing
 * registers. Held under the IPU mutex.
 * NOTE(review): the `switch`/`case`/`break` lines were stripped by the
 * extraction — only the mask-clearing statements remain visible.
 */
1897 int32_t ipu_unlink_channels(struct ipu_soc *ipu, ipu_channel_t src_ch, ipu_channel_t dest_ch)
1900 uint32_t fs_proc_flow1;
1901 uint32_t fs_proc_flow2;
1902 uint32_t fs_proc_flow3;
1903 uint32_t fs_disp_flow1;
1905 mutex_lock(&ipu->mutex_lock);
1907 fs_proc_flow1 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW1);
1908 fs_proc_flow2 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW2);
1909 fs_proc_flow3 = ipu_cm_read(ipu, IPU_FS_PROC_FLOW3);
1910 fs_disp_flow1 = ipu_cm_read(ipu, IPU_FS_DISP_FLOW1);
/* Clear dest-select fields keyed on src_ch. */
1914 fs_proc_flow3 &= ~FS_SMFC0_DEST_SEL_MASK;
1917 fs_proc_flow3 &= ~FS_SMFC1_DEST_SEL_MASK;
1920 fs_proc_flow3 &= ~FS_SMFC2_DEST_SEL_MASK;
1923 fs_proc_flow3 &= ~FS_SMFC3_DEST_SEL_MASK;
1925 case CSI_PRP_ENC_MEM:
1926 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1928 case CSI_PRP_VF_MEM:
1929 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1932 fs_proc_flow2 &= ~FS_PP_DEST_SEL_MASK;
1934 case MEM_ROT_PP_MEM:
1935 fs_proc_flow2 &= ~FS_PP_ROT_DEST_SEL_MASK;
1937 case MEM_PRP_ENC_MEM:
1938 fs_proc_flow2 &= ~FS_PRPENC_DEST_SEL_MASK;
1940 case MEM_ROT_ENC_MEM:
1941 fs_proc_flow2 &= ~FS_PRPENC_ROT_DEST_SEL_MASK;
1943 case MEM_PRP_VF_MEM:
1944 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1946 case MEM_VDI_PRP_VF_MEM:
1947 fs_proc_flow2 &= ~FS_PRPVF_DEST_SEL_MASK;
1949 case MEM_ROT_VF_MEM:
1950 fs_proc_flow2 &= ~FS_PRPVF_ROT_DEST_SEL_MASK;
1953 fs_proc_flow3 &= ~FS_VDOA_DEST_SEL_MASK;
/* Clear src-select fields keyed on dest_ch. */
1962 fs_proc_flow1 &= ~FS_PP_SRC_SEL_MASK;
1964 case MEM_ROT_PP_MEM:
1965 fs_proc_flow1 &= ~FS_PP_ROT_SRC_SEL_MASK;
1967 case MEM_PRP_ENC_MEM:
1968 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1970 case MEM_ROT_ENC_MEM:
1971 fs_proc_flow1 &= ~FS_PRPENC_ROT_SRC_SEL_MASK;
1973 case MEM_PRP_VF_MEM:
1974 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1976 case MEM_VDI_PRP_VF_MEM:
1977 fs_proc_flow1 &= ~FS_PRP_SRC_SEL_MASK;
1979 case MEM_ROT_VF_MEM:
1980 fs_proc_flow1 &= ~FS_PRPVF_ROT_SRC_SEL_MASK;
1983 fs_disp_flow1 &= ~FS_DC1_SRC_SEL_MASK;
1986 fs_disp_flow1 &= ~FS_DP_SYNC0_SRC_SEL_MASK;
1989 fs_disp_flow1 &= ~FS_DP_SYNC1_SRC_SEL_MASK;
1992 fs_disp_flow1 &= ~FS_DC2_SRC_SEL_MASK;
1995 fs_disp_flow1 &= ~FS_DP_ASYNC0_SRC_SEL_MASK;
1998 fs_disp_flow1 &= ~FS_DP_ASYNC1_SRC_SEL_MASK;
2001 fs_proc_flow1 &= ~FS_VDI_SRC_SEL_MASK;
/* Commit the cleared routing state. */
2008 ipu_cm_write(ipu, fs_proc_flow1, IPU_FS_PROC_FLOW1);
2009 ipu_cm_write(ipu, fs_proc_flow2, IPU_FS_PROC_FLOW2);
2010 ipu_cm_write(ipu, fs_proc_flow3, IPU_FS_PROC_FLOW3);
2011 ipu_cm_write(ipu, fs_disp_flow1, IPU_FS_DISP_FLOW1);
2014 mutex_unlock(&ipu->mutex_lock);
2017 EXPORT_SYMBOL(ipu_unlink_channels);
2020 * This function checks whether a logical channel is enabled.
2022 * @param ipu ipu handler
2023 * @param channel Input parameter for the logical channel ID.
2025 * @return This function returns 1 while request channel is enabled or
2026 * 0 for not enabled.
/*
 * Report whether either IDMAC channel (input or output) of a logical
 * channel is currently enabled in IDMAC_CHA_EN.
 * NOTE(review): local declarations and the return statements are missing
 * from this extract (line numbers jump 2028->2034, 2038->2040).
 */
2028 int32_t ipu_is_channel_busy(struct ipu_soc *ipu, ipu_channel_t channel)
2034 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
2035 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
2037 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
2038 if (reg & idma_mask(in_dma))
2040 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
2041 if (reg & idma_mask(out_dma))
2045 EXPORT_SYMBOL(ipu_is_channel_busy);
2048 * This function enables a logical channel.
2050 * @param ipu ipu handler
2051 * @param channel Input parameter for the logical channel ID.
2053 * @return This function returns 0 on success or negative error code on
/*
 * Enable a logical channel: turn on the IPU sub-modules its use counts
 * require (DI/DP/DC/DMFC/IC/VDI/ROT/SMFC) in IPU_CONF, enable the
 * channel's IDMAC DMA channels (plus optional graphics and alpha
 * sub-channels), arm the display watermark for sync channels, and start
 * the IC task where applicable. Tracked in channel_enable_mask.
 * NOTE(review): line numbers jump — error-return paths and some closing
 * braces are missing from this extract.
 */
2056 int32_t ipu_enable_channel(struct ipu_soc *ipu, ipu_channel_t channel)
2065 mutex_lock(&ipu->mutex_lock);
/* Double-enable is rejected (mask bit already set). */
2067 if (ipu->channel_enable_mask & (1L << IPU_CHAN_ID(channel))) {
2068 dev_err(ipu->dev, "Warning: channel already enabled %d\n",
2069 IPU_CHAN_ID(channel));
2070 mutex_unlock(&ipu->mutex_lock);
2074 /* Get input and output dma channels */
2075 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
2076 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Enable every sub-module with a non-zero use count. */
2078 ipu_conf = ipu_cm_read(ipu, IPU_CONF);
2079 if (ipu->di_use_count[0] > 0) {
2080 ipu_conf |= IPU_CONF_DI0_EN;
2082 if (ipu->di_use_count[1] > 0) {
2083 ipu_conf |= IPU_CONF_DI1_EN;
2085 if (ipu->dp_use_count > 0)
2086 ipu_conf |= IPU_CONF_DP_EN;
2087 if (ipu->dc_use_count > 0)
2088 ipu_conf |= IPU_CONF_DC_EN;
2089 if (ipu->dmfc_use_count > 0)
2090 ipu_conf |= IPU_CONF_DMFC_EN;
2091 if (ipu->ic_use_count > 0)
2092 ipu_conf |= IPU_CONF_IC_EN;
2093 if (ipu->vdi_use_count > 0) {
2094 ipu_conf |= IPU_CONF_ISP_EN;
2095 ipu_conf |= IPU_CONF_VDI_EN;
2096 ipu_conf |= IPU_CONF_IC_INPUT;
2098 if (ipu->rot_use_count > 0)
2099 ipu_conf |= IPU_CONF_ROT_EN;
2100 if (ipu->smfc_use_count > 0)
2101 ipu_conf |= IPU_CONF_SMFC_EN;
2102 ipu_cm_write(ipu, ipu_conf, IPU_CONF);
/* Enable the channel's IDMAC DMA channel(s). */
2104 if (idma_is_valid(in_dma)) {
2105 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
2106 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
2108 if (idma_is_valid(out_dma)) {
2109 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
2110 ipu_idmac_write(ipu, reg | idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
/* Secondary (graphics-input) sub-channel for IC combining paths. */
2113 if ((ipu->sec_chan_en[IPU_CHAN_ID(channel)]) &&
2114 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM) ||
2115 (channel == MEM_VDI_PRP_VF_MEM))) {
2116 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2117 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(sec_dma));
2118 ipu_idmac_write(ipu, reg | idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
/* Third (separate-alpha) sub-channel for IC or DP graphics. */
2120 if ((ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) &&
2121 ((channel == MEM_PP_MEM) || (channel == MEM_PRP_VF_MEM))) {
2122 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
2123 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
2124 ipu_idmac_write(ipu, reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
2126 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2127 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2128 ipu_idmac_write(ipu, reg | idma_mask(sec_dma), IDMAC_SEP_ALPHA);
2129 } else if ((ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) &&
2130 ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC))) {
2131 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
2132 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
2133 ipu_idmac_write(ipu, reg | idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
2134 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2135 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_SEP_ALPHA);
/* Display sync channels: enable watermark and kick DP/DC. */
2138 if ((channel == MEM_DC_SYNC) || (channel == MEM_BG_SYNC) ||
2139 (channel == MEM_FG_SYNC)) {
2140 reg = ipu_idmac_read(ipu, IDMAC_WM_EN(in_dma));
2141 ipu_idmac_write(ipu, reg | idma_mask(in_dma), IDMAC_WM_EN(in_dma));
2143 _ipu_dp_dc_enable(ipu, channel);
2146 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
2147 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma) ||
2148 _ipu_is_vdi_out_chan(out_dma))
2149 _ipu_ic_enable_task(ipu, channel);
2151 ipu->channel_enable_mask |= 1L << IPU_CHAN_ID(channel);
2153 mutex_unlock(&ipu->mutex_lock);
2157 EXPORT_SYMBOL(ipu_enable_channel);
2160 * This function checks whether a buffer is ready for a logical channel.
2162 * @param ipu ipu handler
2163 * @param channel Input parameter for the logical channel ID.
2165 * @param type Input parameter which buffer to clear.
2167 * @param bufNum Input parameter for which buffer number clear
/*
 * Read the BUFn_RDY bit for one buffer of a logical channel under the
 * ready-register spinlock and report whether it is still pending.
 * NOTE(review): the `if (bufNum == 0)` line and the two return
 * statements are missing from this extract.
 */
2171 int32_t ipu_check_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2174 uint32_t dma_chan = channel_2_dma(channel, type);
2176 unsigned long lock_flags;
2178 if (dma_chan == IDMA_CHAN_INVALID)
2181 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
2183 reg = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(dma_chan));
2184 else if (bufNum == 1)
2185 reg = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(dma_chan));
2187 reg = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(dma_chan));
2188 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
/* Non-zero ready bit means the buffer is still queued to the IDMAC. */
2190 if (reg & idma_mask(dma_chan))
2195 EXPORT_SYMBOL(ipu_check_buffer_ready);
2198 * This function clears the buffer-ready flag for a logical channel.
2200 * @param ipu ipu handler
2201 * @param channel Input parameter for the logical channel ID.
2203 * @param type Input parameter which buffer to clear.
2205 * @param bufNum Input parameter for which buffer number clear
/*
 * Lock-free worker shared with ipu_clear_buffer_ready(): clears one
 * buffer's ready bit. The BUFn_RDY registers are normally
 * write-one-to-set, so IPU_GPR is flipped to its write-one-to-CLEAR
 * mode around the write, then restored.
 * NOTE(review): caller must hold rdy_reg_spin_lock (see the locked
 * wrapper below); extract is missing the bufNum-0 guard line.
 */
2209 void _ipu_clear_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2212 uint32_t dma_ch = channel_2_dma(channel, type);
2214 if (!idma_is_valid(dma_ch))
2217 ipu_cm_write(ipu, 0xF0300000, IPU_GPR); /* write one to clear */
2219 ipu_cm_write(ipu, idma_mask(dma_ch),
2220 IPU_CHA_BUF0_RDY(dma_ch));
2221 else if (bufNum == 1)
2222 ipu_cm_write(ipu, idma_mask(dma_ch),
2223 IPU_CHA_BUF1_RDY(dma_ch));
2225 ipu_cm_write(ipu, idma_mask(dma_ch),
2226 IPU_CHA_BUF2_RDY(dma_ch));
2227 ipu_cm_write(ipu, 0x0, IPU_GPR); /* write one to set */
/*
 * Public wrapper: take rdy_reg_spin_lock (IRQ-safe) and delegate to
 * _ipu_clear_buffer_ready() to clear one buffer's ready bit.
 */
2230 void ipu_clear_buffer_ready(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type,
2233 unsigned long lock_flags;
2235 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
2236 _ipu_clear_buffer_ready(ipu, channel, type, bufNum);
2237 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
2239 EXPORT_SYMBOL(ipu_clear_buffer_ready);
2242 * This function disables a logical channel.
2244 * @param ipu ipu handler
2245 * @param channel Input parameter for the logical channel ID.
2247 * @param wait_for_stop Flag to set whether to wait for channel end
2248 * of frame or return immediately.
2250 * @return This function returns 0 on success or negative error code on
/*
 * Disable a logical channel: optionally drain the hardware first
 * (display channels wait for BG EOF; processing channels poll the busy
 * bits with an EOF-interrupt timeout), then disable the watermark, the
 * IC task, and every IDMAC DMA channel involved, reset the current-
 * buffer pointers, and clear all buffer-ready bits.
 * NOTE(review): the embedded line numbers jump throughout — declarations,
 * braces, timeout decrements and returns are missing from this extract.
 */
2253 int32_t ipu_disable_channel(struct ipu_soc *ipu, ipu_channel_t channel, bool wait_for_stop)
2258 uint32_t sec_dma = NO_DMA;
2259 uint32_t thrd_dma = NO_DMA;
2260 uint16_t fg_pos_x, fg_pos_y;
2261 unsigned long lock_flags;
2263 mutex_lock(&ipu->mutex_lock);
2265 if ((ipu->channel_enable_mask & (1L << IPU_CHAN_ID(channel))) == 0) {
2266 dev_dbg(ipu->dev, "Channel already disabled %d\n",
2267 IPU_CHAN_ID(channel));
2268 mutex_unlock(&ipu->mutex_lock);
2272 /* Get input and output dma channels */
2273 out_dma = channel_2_dma(channel, IPU_OUTPUT_BUFFER);
2274 in_dma = channel_2_dma(channel, IPU_VIDEO_IN_BUFFER);
/* Nothing to do if neither DMA channel is actually enabled. */
2276 if ((idma_is_valid(in_dma) &&
2277 !idma_is_set(ipu, IDMAC_CHA_EN, in_dma))
2278 && (idma_is_valid(out_dma) &&
2279 !idma_is_set(ipu, IDMAC_CHA_EN, out_dma))) {
2280 mutex_unlock(&ipu->mutex_lock);
2284 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)])
2285 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2286 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)]) {
2287 sec_dma = channel_2_dma(channel, IPU_GRAPH_IN_BUFFER);
2288 thrd_dma = channel_2_dma(channel, IPU_ALPHA_IN_BUFFER);
/* Display channels: park the FG window at 0,0 and stop DP/DC first. */
2291 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2292 (channel == MEM_DC_SYNC)) {
2293 if (channel == MEM_FG_SYNC) {
2294 _ipu_disp_get_window_pos(ipu, channel, &fg_pos_x, &fg_pos_y);
2295 _ipu_disp_set_window_pos(ipu, channel, 0, 0);
2298 _ipu_dp_dc_disable(ipu, channel, false);
2301 * wait for BG channel EOF then disable FG-IDMAC,
2302 * it avoids the FG NFB4EOF error.
2304 if ((channel == MEM_FG_SYNC) && (ipu_is_channel_busy(ipu, MEM_BG_SYNC))) {
/* Clear the BG EOF status, then poll it (bounded by a timeout). */
2307 ipu_cm_write(ipu, IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF),
2308 IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF));
2309 while ((ipu_cm_read(ipu, IPUIRQ_2_STATREG(IPU_IRQ_BG_SYNC_EOF)) &
2310 IPUIRQ_2_MASK(IPU_IRQ_BG_SYNC_EOF)) == 0) {
2314 dev_err(ipu->dev, "warning: wait for bg sync eof timeout\n");
/* Processing channels: spin until every involved DMA goes idle. */
2319 } else if (wait_for_stop && !_ipu_is_smfc_chan(out_dma) &&
2320 channel != CSI_PRP_VF_MEM && channel != CSI_PRP_ENC_MEM) {
2321 while (idma_is_set(ipu, IDMAC_CHA_BUSY, in_dma) ||
2322 idma_is_set(ipu, IDMAC_CHA_BUSY, out_dma) ||
2323 (ipu->sec_chan_en[IPU_CHAN_ID(channel)] &&
2324 idma_is_set(ipu, IDMAC_CHA_BUSY, sec_dma)) ||
2325 (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] &&
2326 idma_is_set(ipu, IDMAC_CHA_BUSY, thrd_dma))) {
2327 uint32_t irq = 0xffffffff;
2328 int timeout = 50000;
/* Pick whichever DMA channel is still busy to wait on its EOF. */
2330 if (idma_is_set(ipu, IDMAC_CHA_BUSY, out_dma))
2332 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] &&
2333 idma_is_set(ipu, IDMAC_CHA_BUSY, sec_dma))
2335 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] &&
2336 idma_is_set(ipu, IDMAC_CHA_BUSY, thrd_dma))
2338 if (idma_is_set(ipu, IDMAC_CHA_BUSY, in_dma))
2341 if (irq == 0xffffffff) {
2342 dev_dbg(ipu->dev, "warning: no channel busy, break\n");
2346 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq),
2347 IPUIRQ_2_STATREG(irq));
2349 dev_dbg(ipu->dev, "warning: channel %d busy, need wait\n", irq);
2351 while (((ipu_cm_read(ipu, IPUIRQ_2_STATREG(irq))
2352 & IPUIRQ_2_MASK(irq)) == 0) &&
2353 (idma_is_set(ipu, IDMAC_CHA_BUSY, irq))) {
2357 ipu_dump_registers(ipu);
2358 dev_err(ipu->dev, "warning: disable ipu dma channel %d during its busy state\n", irq);
2362 dev_dbg(ipu->dev, "wait_time:%d\n", 50000 - timeout);
/* Drop the display watermark for sync channels. */
2367 if ((channel == MEM_BG_SYNC) || (channel == MEM_FG_SYNC) ||
2368 (channel == MEM_DC_SYNC)) {
2369 reg = ipu_idmac_read(ipu, IDMAC_WM_EN(in_dma));
2370 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_WM_EN(in_dma));
2373 /* Disable IC task */
2374 if (_ipu_is_ic_chan(in_dma) || _ipu_is_ic_chan(out_dma) ||
2375 _ipu_is_irt_chan(in_dma) || _ipu_is_irt_chan(out_dma) ||
2376 _ipu_is_vdi_out_chan(out_dma))
2377 _ipu_ic_disable_task(ipu, channel);
2379 /* Disable DMA channel(s) */
2380 if (idma_is_valid(in_dma)) {
2381 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(in_dma));
2382 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_CHA_EN(in_dma));
2383 ipu_cm_write(ipu, idma_mask(in_dma), IPU_CHA_CUR_BUF(in_dma));
2384 ipu_cm_write(ipu, tri_cur_buf_mask(in_dma),
2385 IPU_CHA_TRIPLE_CUR_BUF(in_dma));
2387 if (idma_is_valid(out_dma)) {
2388 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(out_dma));
2389 ipu_idmac_write(ipu, reg & ~idma_mask(out_dma), IDMAC_CHA_EN(out_dma));
2390 ipu_cm_write(ipu, idma_mask(out_dma), IPU_CHA_CUR_BUF(out_dma));
2391 ipu_cm_write(ipu, tri_cur_buf_mask(out_dma),
2392 IPU_CHA_TRIPLE_CUR_BUF(out_dma));
2394 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2395 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(sec_dma));
2396 ipu_idmac_write(ipu, reg & ~idma_mask(sec_dma), IDMAC_CHA_EN(sec_dma));
2397 ipu_cm_write(ipu, idma_mask(sec_dma), IPU_CHA_CUR_BUF(sec_dma));
2399 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2400 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(thrd_dma));
2401 ipu_idmac_write(ipu, reg & ~idma_mask(thrd_dma), IDMAC_CHA_EN(thrd_dma));
2402 if (channel == MEM_BG_SYNC || channel == MEM_FG_SYNC) {
2403 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2404 ipu_idmac_write(ipu, reg & ~idma_mask(in_dma), IDMAC_SEP_ALPHA);
2406 reg = ipu_idmac_read(ipu, IDMAC_SEP_ALPHA);
2407 ipu_idmac_write(ipu, reg & ~idma_mask(sec_dma), IDMAC_SEP_ALPHA);
2409 ipu_cm_write(ipu, idma_mask(thrd_dma), IPU_CHA_CUR_BUF(thrd_dma));
/* Restore the FG window position saved before the shutdown. */
2412 if (channel == MEM_FG_SYNC)
2413 _ipu_disp_set_window_pos(ipu, channel, fg_pos_x, fg_pos_y);
2415 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
2416 /* Set channel buffers NOT to be ready */
2417 if (idma_is_valid(in_dma)) {
2418 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 0);
2419 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 1);
2420 _ipu_clear_buffer_ready(ipu, channel, IPU_VIDEO_IN_BUFFER, 2);
2422 if (idma_is_valid(out_dma)) {
2423 _ipu_clear_buffer_ready(ipu, channel, IPU_OUTPUT_BUFFER, 0);
2424 _ipu_clear_buffer_ready(ipu, channel, IPU_OUTPUT_BUFFER, 1);
2426 if (ipu->sec_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(sec_dma)) {
2427 _ipu_clear_buffer_ready(ipu, channel, IPU_GRAPH_IN_BUFFER, 0);
2428 _ipu_clear_buffer_ready(ipu, channel, IPU_GRAPH_IN_BUFFER, 1);
2430 if (ipu->thrd_chan_en[IPU_CHAN_ID(channel)] && idma_is_valid(thrd_dma)) {
2431 _ipu_clear_buffer_ready(ipu, channel, IPU_ALPHA_IN_BUFFER, 0);
2432 _ipu_clear_buffer_ready(ipu, channel, IPU_ALPHA_IN_BUFFER, 1);
2434 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
2436 ipu->channel_enable_mask &= ~(1L << IPU_CHAN_ID(channel));
2438 mutex_unlock(&ipu->mutex_lock);
2442 EXPORT_SYMBOL(ipu_disable_channel);
2445 * This function enables CSI.
2447 * @param ipu ipu handler
2448 * @param csi csi num 0 or 1
2450 * @return This function returns 0 on success or negative error code on
2453 int32_t ipu_enable_csi(struct ipu_soc *ipu, uint32_t csi)
2458 dev_err(ipu->dev, "Wrong csi num_%d\n", csi);
2463 mutex_lock(&ipu->mutex_lock);
2464 ipu->csi_use_count[csi]++;
2466 if (ipu->csi_use_count[csi] == 1) {
2467 reg = ipu_cm_read(ipu, IPU_CONF);
2469 ipu_cm_write(ipu, reg | IPU_CONF_CSI0_EN, IPU_CONF);
2471 ipu_cm_write(ipu, reg | IPU_CONF_CSI1_EN, IPU_CONF);
2473 mutex_unlock(&ipu->mutex_lock);
2477 EXPORT_SYMBOL(ipu_enable_csi);
2480 * This function disables CSI.
2482 * @param ipu ipu handler
2483 * @param csi csi num 0 or 1
2485 * @return This function returns 0 on success or negative error code on
2488 int32_t ipu_disable_csi(struct ipu_soc *ipu, uint32_t csi)
2493 dev_err(ipu->dev, "Wrong csi num_%d\n", csi);
2497 mutex_lock(&ipu->mutex_lock);
2498 ipu->csi_use_count[csi]--;
2499 if (ipu->csi_use_count[csi] == 0) {
2500 _ipu_csi_wait4eof(ipu, ipu->csi_channel[csi]);
2501 reg = ipu_cm_read(ipu, IPU_CONF);
2503 ipu_cm_write(ipu, reg & ~IPU_CONF_CSI0_EN, IPU_CONF);
2505 ipu_cm_write(ipu, reg & ~IPU_CONF_CSI1_EN, IPU_CONF);
2507 mutex_unlock(&ipu->mutex_lock);
2511 EXPORT_SYMBOL(ipu_disable_csi);
2513 static irqreturn_t ipu_sync_irq_handler(int irq, void *desc)
2515 struct ipu_soc *ipu = desc;
2517 uint32_t line, bit, int_stat, int_ctrl;
2518 irqreturn_t result = IRQ_NONE;
2519 const int int_reg[] = { 1, 2, 3, 4, 11, 12, 13, 14, 15, 0 };
2521 spin_lock(&ipu->int_reg_spin_lock);
2523 for (i = 0; int_reg[i] != 0; i++) {
2524 int_stat = ipu_cm_read(ipu, IPU_INT_STAT(int_reg[i]));
2525 int_ctrl = ipu_cm_read(ipu, IPU_INT_CTRL(int_reg[i]));
2526 int_stat &= int_ctrl;
2527 ipu_cm_write(ipu, int_stat, IPU_INT_STAT(int_reg[i]));
2528 while ((line = ffs(int_stat)) != 0) {
2530 int_stat &= ~(1UL << line);
2531 line += (int_reg[i] - 1) * 32;
2533 ipu->irq_list[line].handler(line,
2534 ipu->irq_list[line].
2536 if (ipu->irq_list[line].flags & IPU_IRQF_ONESHOT) {
2537 int_ctrl &= ~(1UL << bit);
2538 ipu_cm_write(ipu, int_ctrl,
2539 IPU_INT_CTRL(int_reg[i]));
2544 spin_unlock(&ipu->int_reg_spin_lock);
2549 static irqreturn_t ipu_err_irq_handler(int irq, void *desc)
2551 struct ipu_soc *ipu = desc;
2554 const int err_reg[] = { 5, 6, 9, 10, 0 };
2556 spin_lock(&ipu->int_reg_spin_lock);
2558 for (i = 0; err_reg[i] != 0; i++) {
2559 int_stat = ipu_cm_read(ipu, IPU_INT_STAT(err_reg[i]));
2560 int_stat &= ipu_cm_read(ipu, IPU_INT_CTRL(err_reg[i]));
2562 ipu_cm_write(ipu, int_stat, IPU_INT_STAT(err_reg[i]));
2564 "IPU Warning - IPU_INT_STAT_%d = 0x%08X\n",
2565 err_reg[i], int_stat);
2566 /* Disable interrupts so we only get error once */
2567 int_stat = ipu_cm_read(ipu, IPU_INT_CTRL(err_reg[i])) &
2569 ipu_cm_write(ipu, int_stat, IPU_INT_CTRL(err_reg[i]));
2573 spin_unlock(&ipu->int_reg_spin_lock);
2579 * This function enables the interrupt for the specified interrupt line.
2580 * The interrupt lines are defined in \b ipu_irq_line enum.
2582 * @param ipu ipu handler
2583 * @param irq Interrupt line to enable interrupt for.
2585 * @return This function returns 0 on success or negative error code on
2588 int ipu_enable_irq(struct ipu_soc *ipu, uint32_t irq)
2591 unsigned long lock_flags;
2596 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2599 * Check sync interrupt handler only, since we do nothing for
2600 * error interrupts but than print out register values in the
2601 * error interrupt source handler.
2603 if (_ipu_is_sync_irq(irq) && (ipu->irq_list[irq].handler == NULL)) {
2604 dev_err(ipu->dev, "handler hasn't been registered on sync "
2610 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2611 reg |= IPUIRQ_2_MASK(irq);
2612 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2614 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2620 EXPORT_SYMBOL(ipu_enable_irq);
2623 * This function disables the interrupt for the specified interrupt line.
2624 * The interrupt lines are defined in \b ipu_irq_line enum.
2626 * @param ipu ipu handler
2627 * @param irq Interrupt line to disable interrupt for.
2630 void ipu_disable_irq(struct ipu_soc *ipu, uint32_t irq)
2633 unsigned long lock_flags;
2637 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2639 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2640 reg &= ~IPUIRQ_2_MASK(irq);
2641 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2643 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2647 EXPORT_SYMBOL(ipu_disable_irq);
2650 * This function clears the interrupt for the specified interrupt line.
2651 * The interrupt lines are defined in \b ipu_irq_line enum.
2653 * @param ipu ipu handler
2654 * @param irq Interrupt line to clear interrupt for.
2657 void ipu_clear_irq(struct ipu_soc *ipu, uint32_t irq)
2659 unsigned long lock_flags;
2663 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2665 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq), IPUIRQ_2_STATREG(irq));
2667 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2671 EXPORT_SYMBOL(ipu_clear_irq);
2674 * This function returns the current interrupt status for the specified
2675 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2677 * @param ipu ipu handler
2678 * @param irq Interrupt line to get status for.
2680 * @return Returns true if the interrupt is pending/asserted or false if
2681 * the interrupt is not pending.
2683 bool ipu_get_irq_status(struct ipu_soc *ipu, uint32_t irq)
2686 unsigned long lock_flags;
2690 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2691 reg = ipu_cm_read(ipu, IPUIRQ_2_STATREG(irq));
2692 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2696 if (reg & IPUIRQ_2_MASK(irq))
2701 EXPORT_SYMBOL(ipu_get_irq_status);
2704 * This function registers an interrupt handler function for the specified
2705 * interrupt line. The interrupt lines are defined in \b ipu_irq_line enum.
2707 * @param ipu ipu handler
2708 * @param irq Interrupt line to get status for.
2710 * @param handler Input parameter for address of the handler
2713 * @param irq_flags Flags for interrupt mode. Currently not used.
2715 * @param devname Input parameter for string name of driver
2716 * registering the handler.
2718 * @param dev_id Input parameter for pointer of data to be
2719 * passed to the handler.
2721 * @return This function returns 0 on success or negative error code on
2724 int ipu_request_irq(struct ipu_soc *ipu, uint32_t irq,
2725 irqreturn_t(*handler) (int, void *),
2726 uint32_t irq_flags, const char *devname, void *dev_id)
2729 unsigned long lock_flags;
2732 BUG_ON(irq >= IPU_IRQ_COUNT);
2736 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2738 if (ipu->irq_list[irq].handler != NULL) {
2740 "handler already installed on irq %d\n", irq);
2746 * Check sync interrupt handler only, since we do nothing for
2747 * error interrupts but than print out register values in the
2748 * error interrupt source handler.
2750 if (_ipu_is_sync_irq(irq) && (handler == NULL)) {
2751 dev_err(ipu->dev, "handler is NULL for sync irq %d\n", irq);
2756 ipu->irq_list[irq].handler = handler;
2757 ipu->irq_list[irq].flags = irq_flags;
2758 ipu->irq_list[irq].dev_id = dev_id;
2759 ipu->irq_list[irq].name = devname;
2761 /* clear irq stat for previous use */
2762 ipu_cm_write(ipu, IPUIRQ_2_MASK(irq), IPUIRQ_2_STATREG(irq));
2763 /* enable the interrupt */
2764 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2765 reg |= IPUIRQ_2_MASK(irq);
2766 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2768 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2774 EXPORT_SYMBOL(ipu_request_irq);
2777 * This function unregisters an interrupt handler for the specified interrupt
2778 * line. The interrupt lines are defined in \b ipu_irq_line enum.
2780 * @param ipu ipu handler
2781 * @param irq Interrupt line to get status for.
2783 * @param dev_id Input parameter for pointer of data to be passed
2784 * to the handler. This must match value passed to
2785 * ipu_request_irq().
2788 void ipu_free_irq(struct ipu_soc *ipu, uint32_t irq, void *dev_id)
2791 unsigned long lock_flags;
2795 spin_lock_irqsave(&ipu->int_reg_spin_lock, lock_flags);
2797 /* disable the interrupt */
2798 reg = ipu_cm_read(ipu, IPUIRQ_2_CTRLREG(irq));
2799 reg &= ~IPUIRQ_2_MASK(irq);
2800 ipu_cm_write(ipu, reg, IPUIRQ_2_CTRLREG(irq));
2801 if (ipu->irq_list[irq].dev_id == dev_id)
2802 memset(&ipu->irq_list[irq], 0, sizeof(ipu->irq_list[irq]));
2804 spin_unlock_irqrestore(&ipu->int_reg_spin_lock, lock_flags);
2808 EXPORT_SYMBOL(ipu_free_irq);
2810 uint32_t ipu_get_cur_buffer_idx(struct ipu_soc *ipu, ipu_channel_t channel, ipu_buffer_t type)
2812 uint32_t reg, dma_chan;
2814 dma_chan = channel_2_dma(channel, type);
2815 if (!idma_is_valid(dma_chan))
2818 reg = ipu_cm_read(ipu, IPU_CHA_TRB_MODE_SEL(dma_chan));
2819 if ((reg & idma_mask(dma_chan)) && _ipu_is_trb_chan(dma_chan)) {
2820 reg = ipu_cm_read(ipu, IPU_CHA_TRIPLE_CUR_BUF(dma_chan));
2821 return (reg & tri_cur_buf_mask(dma_chan)) >>
2822 tri_cur_buf_shift(dma_chan);
2824 reg = ipu_cm_read(ipu, IPU_CHA_CUR_BUF(dma_chan));
2825 if (reg & idma_mask(dma_chan))
2831 EXPORT_SYMBOL(ipu_get_cur_buffer_idx);
2833 uint32_t _ipu_channel_status(struct ipu_soc *ipu, ipu_channel_t channel)
2836 uint32_t task_stat_reg = ipu_cm_read(ipu, IPU_PROC_TASK_STAT);
2839 case MEM_PRP_VF_MEM:
2840 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
2842 case MEM_VDI_PRP_VF_MEM:
2843 stat = (task_stat_reg & TSTAT_VF_MASK) >> TSTAT_VF_OFFSET;
2845 case MEM_ROT_VF_MEM:
2847 (task_stat_reg & TSTAT_VF_ROT_MASK) >> TSTAT_VF_ROT_OFFSET;
2849 case MEM_PRP_ENC_MEM:
2850 stat = (task_stat_reg & TSTAT_ENC_MASK) >> TSTAT_ENC_OFFSET;
2852 case MEM_ROT_ENC_MEM:
2854 (task_stat_reg & TSTAT_ENC_ROT_MASK) >>
2855 TSTAT_ENC_ROT_OFFSET;
2858 stat = (task_stat_reg & TSTAT_PP_MASK) >> TSTAT_PP_OFFSET;
2860 case MEM_ROT_PP_MEM:
2862 (task_stat_reg & TSTAT_PP_ROT_MASK) >> TSTAT_PP_ROT_OFFSET;
2866 stat = TASK_STAT_IDLE;
2873 * This function check for a logical channel status
2875 * @param ipu ipu handler
2876 * @param channel Input parameter for the logical channel ID.
2878 * @return This function returns 0 on idle and 1 on busy.
2881 uint32_t ipu_channel_status(struct ipu_soc *ipu, ipu_channel_t channel)
2883 uint32_t dma_status;
2886 mutex_lock(&ipu->mutex_lock);
2887 dma_status = ipu_is_channel_busy(ipu, channel);
2888 mutex_unlock(&ipu->mutex_lock);
2891 dev_dbg(ipu->dev, "%s, dma_status:%d.\n", __func__, dma_status);
2895 EXPORT_SYMBOL(ipu_channel_status);
2897 int32_t ipu_swap_channel(struct ipu_soc *ipu, ipu_channel_t from_ch, ipu_channel_t to_ch)
2900 unsigned long lock_flags;
2901 int from_dma = channel_2_dma(from_ch, IPU_INPUT_BUFFER);
2902 int to_dma = channel_2_dma(to_ch, IPU_INPUT_BUFFER);
2904 mutex_lock(&ipu->mutex_lock);
2906 /* enable target channel */
2907 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(to_dma));
2908 ipu_idmac_write(ipu, reg | idma_mask(to_dma), IDMAC_CHA_EN(to_dma));
2910 ipu->channel_enable_mask |= 1L << IPU_CHAN_ID(to_ch);
2913 _ipu_dp_dc_disable(ipu, from_ch, true);
2915 /* disable source channel */
2916 reg = ipu_idmac_read(ipu, IDMAC_CHA_EN(from_dma));
2917 ipu_idmac_write(ipu, reg & ~idma_mask(from_dma), IDMAC_CHA_EN(from_dma));
2918 ipu_cm_write(ipu, idma_mask(from_dma), IPU_CHA_CUR_BUF(from_dma));
2919 ipu_cm_write(ipu, tri_cur_buf_mask(from_dma),
2920 IPU_CHA_TRIPLE_CUR_BUF(from_dma));
2922 ipu->channel_enable_mask &= ~(1L << IPU_CHAN_ID(from_ch));
2924 spin_lock_irqsave(&ipu->rdy_reg_spin_lock, lock_flags);
2925 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 0);
2926 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 1);
2927 _ipu_clear_buffer_ready(ipu, from_ch, IPU_VIDEO_IN_BUFFER, 2);
2928 spin_unlock_irqrestore(&ipu->rdy_reg_spin_lock, lock_flags);
2930 mutex_unlock(&ipu->mutex_lock);
2934 EXPORT_SYMBOL(ipu_swap_channel);
2936 uint32_t bytes_per_pixel(uint32_t fmt)
2939 case IPU_PIX_FMT_GENERIC: /*generic data */
2940 case IPU_PIX_FMT_RGB332:
2941 case IPU_PIX_FMT_YUV420P:
2942 case IPU_PIX_FMT_YVU420P:
2943 case IPU_PIX_FMT_YUV422P:
2944 case IPU_PIX_FMT_YUV444P:
2947 case IPU_PIX_FMT_GENERIC_16: /* generic data */
2948 case IPU_PIX_FMT_RGB565:
2949 case IPU_PIX_FMT_YUYV:
2950 case IPU_PIX_FMT_UYVY:
2953 case IPU_PIX_FMT_BGR24:
2954 case IPU_PIX_FMT_RGB24:
2955 case IPU_PIX_FMT_YUV444:
2958 case IPU_PIX_FMT_GENERIC_32: /*generic data */
2959 case IPU_PIX_FMT_BGR32:
2960 case IPU_PIX_FMT_BGRA32:
2961 case IPU_PIX_FMT_RGB32:
2962 case IPU_PIX_FMT_RGBA32:
2963 case IPU_PIX_FMT_ABGR32:
2972 EXPORT_SYMBOL(bytes_per_pixel);
2974 ipu_color_space_t format_to_colorspace(uint32_t fmt)
2977 case IPU_PIX_FMT_RGB666:
2978 case IPU_PIX_FMT_RGB565:
2979 case IPU_PIX_FMT_BGR24:
2980 case IPU_PIX_FMT_RGB24:
2981 case IPU_PIX_FMT_GBR24:
2982 case IPU_PIX_FMT_BGR32:
2983 case IPU_PIX_FMT_BGRA32:
2984 case IPU_PIX_FMT_RGB32:
2985 case IPU_PIX_FMT_RGBA32:
2986 case IPU_PIX_FMT_ABGR32:
2987 case IPU_PIX_FMT_LVDS666:
2988 case IPU_PIX_FMT_LVDS888:
2999 bool ipu_pixel_format_has_alpha(uint32_t fmt)
3002 case IPU_PIX_FMT_RGBA32:
3003 case IPU_PIX_FMT_BGRA32:
3004 case IPU_PIX_FMT_ABGR32:
3015 static int ipu_suspend(struct device *dev)
3017 struct ipu_soc *ipu = dev_get_drvdata(dev);
3019 /* All IDMAC channel and IPU clock should be disabled.*/
3023 dev_dbg(dev, "ipu suspend.\n");
3027 static int ipu_resume(struct device *dev)
3029 struct ipu_soc *ipu = dev_get_drvdata(dev);
3031 if (ipu->pdata->pg) {
3035 _ipu_dmfc_init(ipu, dmfc_type_setup, 1);
3036 /* Set sync refresh channels as high priority */
3037 ipu_idmac_write(ipu, 0x18800001L, IDMAC_CHA_PRI(0));
3040 dev_dbg(dev, "ipu resume.\n");
3044 int ipu_runtime_suspend(struct device *dev)
3047 release_bus_freq(BUS_FREQ_HIGH);
3048 dev_dbg(dev, "ipu busfreq high release.\n");
3053 int ipu_runtime_resume(struct device *dev)
3056 request_bus_freq(BUS_FREQ_HIGH);
3057 dev_dbg(dev, "ipu busfreq high requst.\n");
3062 static const struct dev_pm_ops ipu_pm_ops = {
3063 SET_RUNTIME_PM_OPS(ipu_runtime_suspend, ipu_runtime_resume, NULL)
3064 SET_SYSTEM_SLEEP_PM_OPS(ipu_suspend, ipu_resume)
3069 * This structure contains pointers to the power management callback functions.
3071 static struct platform_driver mxcipu_driver = {
3073 .name = "imx-ipuv3",
3074 .of_match_table = imx_ipuv3_dt_ids,
3080 .id_table = imx_ipu_type,
3081 .remove = ipu_remove,
3084 int32_t __init ipu_gen_init(void)
3088 ret = platform_driver_register(&mxcipu_driver);
3092 subsys_initcall(ipu_gen_init);
3094 static void __exit ipu_gen_uninit(void)
3096 platform_driver_unregister(&mxcipu_driver);
3099 module_exit(ipu_gen_uninit);