author    Archit Taneja <archit@ti.com>              2013-12-12 03:35:59 -0500
committer Mauro Carvalho Chehab <m.chehab@samsung.com>  2014-01-07 03:55:08 -0500
commit    773f06577b35f84f84de980e1be3eead8342b5e5 (patch)
tree      8592e334514342c4337b8e7e44520f300df7c3eb
parent    0df20f9657693c420b10e8d18f1472e0dd47d634 (diff)
[media] v4l: ti-vpe: make vpe driver load scaler coefficients
Make the driver allocate dma buffers to store horizontal and vertical scaler
coeffs. Use the scaler library API to choose and copy scaler coefficients into
the above buffers based on the scaling ratio. Since the SC block comes after
the de-interlacer, make sure that the source height is doubled if the
de-interlacer was used.

These buffers now need to be used by VPDMA to load the coefficients into the
SRAM within SC. In device_run, add configuration descriptors whose payloads
point to the scaler coefficients in memory. Use the members of the sc_data
handle to skip adding these descriptors when there is no need to re-load
coefficients into SC. This avoids unnecessary re-loading of the coefficients
when we switch back and forth between vpe contexts.

Signed-off-by: Archit Taneja <archit@ti.com>
Signed-off-by: Hans Verkuil <hans.verkuil@cisco.com>
Signed-off-by: Mauro Carvalho Chehab <m.chehab@samsung.com>
-rw-r--r--	drivers/media/platform/ti-vpe/vpe.c	47
1 file changed, 46 insertions(+), 1 deletion(-)
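The reload guard described in the commit message (skip the coefficient config
descriptors when the SC SRAM already holds this context's coefficients) is the
core of the device_run() change below. The following is a minimal, compilable
sketch of that pattern; the struct layouts and the helper need_hs_coeff_load()
are reduced, illustrative stand-ins rather than the driver's real definitions,
and only the horizontal path is shown.

	/*
	 * Condensed sketch of the coefficient reload guard used in
	 * device_run() below.  vpdma_buf and sc_data are stand-ins that
	 * keep only the fields needed to show the pattern.
	 */
	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	struct vpdma_buf {
		uint32_t dma_addr;	/* DMA address of the coeff buffer */
	};

	struct sc_data {
		uint32_t loaded_coeff_h;	/* coeffs currently in SC SRAM */
		bool load_coeff_h;		/* set when new coeffs were written */
	};

	/*
	 * Returns true when a VPDMA configuration descriptor would be queued:
	 * either the SC SRAM holds coefficients from a different buffer, or
	 * the coefficient buffer was marked dirty by the scaler library.
	 */
	static bool need_hs_coeff_load(struct sc_data *sc, struct vpdma_buf *coeff_h)
	{
		if (sc->loaded_coeff_h != coeff_h->dma_addr || sc->load_coeff_h) {
			/* real driver: vpdma_map_desc_buf() + vpdma_add_cfd_block() */
			sc->loaded_coeff_h = coeff_h->dma_addr;
			sc->load_coeff_h = false;
			return true;
		}
		return false;	/* same coeffs already loaded: skip the descriptor */
	}

	int main(void)
	{
		struct sc_data sc = { .loaded_coeff_h = 0, .load_coeff_h = true };
		struct vpdma_buf coeff_h = { .dma_addr = 0x1000 };

		printf("first run:  load = %d\n", need_hs_coeff_load(&sc, &coeff_h));
		printf("second run: load = %d\n", need_hs_coeff_load(&sc, &coeff_h));
		return 0;
	}

Running the sketch prints "load = 1" on the first pass and "load = 0" on the
second, mirroring how a second device_run() on the same context skips the
coefficient descriptors.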
diff --git a/drivers/media/platform/ti-vpe/vpe.c b/drivers/media/platform/ti-vpe/vpe.c
index ecb85f9ae3ab..50d6d0e696e7 100644
--- a/drivers/media/platform/ti-vpe/vpe.c
+++ b/drivers/media/platform/ti-vpe/vpe.c
@@ -356,6 +356,8 @@ struct vpe_ctx {
 	void *mv_buf[2];		/* virtual addrs of motion vector bufs */
 	size_t mv_buf_size;		/* current motion vector buffer size */
 	struct vpdma_buf mmr_adb;	/* shadow reg addr/data block */
+	struct vpdma_buf sc_coeff_h;	/* h coeff buffer */
+	struct vpdma_buf sc_coeff_v;	/* v coeff buffer */
 	struct vpdma_desc_list desc_list;	/* DMA descriptor list */
 
 	bool deinterlacing;		/* using de-interlacer */
@@ -765,6 +767,10 @@ static int set_srcdst_params(struct vpe_ctx *ctx)
 	struct vpe_q_data *s_q_data = &ctx->q_data[Q_DATA_SRC];
 	struct vpe_q_data *d_q_data = &ctx->q_data[Q_DATA_DST];
 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
+	unsigned int src_w = s_q_data->c_rect.width;
+	unsigned int src_h = s_q_data->c_rect.height;
+	unsigned int dst_w = d_q_data->c_rect.width;
+	unsigned int dst_h = d_q_data->c_rect.height;
 	size_t mv_buf_size;
 	int ret;
 
@@ -777,7 +783,6 @@ static int set_srcdst_params(struct vpe_ctx *ctx)
 		const struct vpdma_data_format *mv =
 			&vpdma_misc_fmts[VPDMA_DATA_FMT_MV];
 
-		ctx->deinterlacing = 1;
 		/*
 		 * we make sure that the source image has a 16 byte aligned
 		 * stride, we need to do the same for the motion vector buffer
@@ -788,6 +793,9 @@ static int set_srcdst_params(struct vpe_ctx *ctx)
 		bytes_per_line = ALIGN((s_q_data->width * mv->depth) >> 3,
 				VPDMA_STRIDE_ALIGN);
 		mv_buf_size = bytes_per_line * s_q_data->height;
+
+		ctx->deinterlacing = 1;
+		src_h <<= 1;
 	} else {
 		ctx->deinterlacing = 0;
 		mv_buf_size = 0;
@@ -802,6 +810,8 @@ static int set_srcdst_params(struct vpe_ctx *ctx)
 	set_cfg_and_line_modes(ctx);
 	set_dei_regs(ctx);
 	set_csc_coeff_bypass(ctx);
+	sc_set_hs_coeffs(ctx->dev->sc, ctx->sc_coeff_h.addr, src_w, dst_w);
+	sc_set_vs_coeffs(ctx->dev->sc, ctx->sc_coeff_v.addr, src_h, dst_h);
 	sc_set_regs_bypass(ctx->dev->sc, &mmr_adb->sc_regs[0]);
 
 	return 0;
@@ -1035,6 +1045,7 @@ static void disable_irqs(struct vpe_ctx *ctx)
 static void device_run(void *priv)
 {
 	struct vpe_ctx *ctx = priv;
+	struct sc_data *sc = ctx->dev->sc;
 	struct vpe_q_data *d_q_data = &ctx->q_data[Q_DATA_DST];
 
 	if (ctx->deinterlacing && ctx->src_vbs[2] == NULL) {
@@ -1057,6 +1068,26 @@ static void device_run(void *priv)
 		ctx->load_mmrs = false;
 	}
 
+	if (sc->loaded_coeff_h != ctx->sc_coeff_h.dma_addr ||
+			sc->load_coeff_h) {
+		vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->sc_coeff_h);
+		vpdma_add_cfd_block(&ctx->desc_list, CFD_SC_CLIENT,
+			&ctx->sc_coeff_h, 0);
+
+		sc->loaded_coeff_h = ctx->sc_coeff_h.dma_addr;
+		sc->load_coeff_h = false;
+	}
+
+	if (sc->loaded_coeff_v != ctx->sc_coeff_v.dma_addr ||
+			sc->load_coeff_v) {
+		vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->sc_coeff_v);
+		vpdma_add_cfd_block(&ctx->desc_list, CFD_SC_CLIENT,
+			&ctx->sc_coeff_v, SC_COEF_SRAM_SIZE >> 4);
+
+		sc->loaded_coeff_v = ctx->sc_coeff_v.dma_addr;
+		sc->load_coeff_v = false;
+	}
+
 	/* output data descriptors */
 	if (ctx->deinterlacing)
 		add_out_dtd(ctx, VPE_PORT_MV_OUT);
@@ -1180,6 +1211,8 @@ static irqreturn_t vpe_irq(int irq_vpe, void *data)
 
 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->desc_list.buf);
 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->mmr_adb);
+	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_h);
+	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_v);
 
 	vpdma_reset_desc_list(&ctx->desc_list);
 
@@ -1752,6 +1785,14 @@ static int vpe_open(struct file *file)
 	if (ret != 0)
 		goto free_desc_list;
 
+	ret = vpdma_alloc_desc_buf(&ctx->sc_coeff_h, SC_COEF_SRAM_SIZE);
+	if (ret != 0)
+		goto free_mmr_adb;
+
+	ret = vpdma_alloc_desc_buf(&ctx->sc_coeff_v, SC_COEF_SRAM_SIZE);
+	if (ret != 0)
+		goto free_sc_h;
+
 	init_adb_hdrs(ctx);
 
 	v4l2_fh_init(&ctx->fh, video_devdata(file));
@@ -1820,6 +1861,10 @@ static int vpe_open(struct file *file)
 exit_fh:
 	v4l2_ctrl_handler_free(hdl);
 	v4l2_fh_exit(&ctx->fh);
+	vpdma_free_desc_buf(&ctx->sc_coeff_v);
+free_sc_h:
+	vpdma_free_desc_buf(&ctx->sc_coeff_h);
+free_mmr_adb:
 	vpdma_free_desc_buf(&ctx->mmr_adb);
 free_desc_list:
 	vpdma_free_desc_list(&ctx->desc_list);